diff --git a/src/ansys/dpf/core/inputs.py b/src/ansys/dpf/core/inputs.py index c981b8b0349..7d06bd75b07 100644 --- a/src/ansys/dpf/core/inputs.py +++ b/src/ansys/dpf/core/inputs.py @@ -24,6 +24,7 @@ from enum import Enum from textwrap import wrap +from typing import Generic, TypeVar import warnings import weakref @@ -31,8 +32,10 @@ from ansys.dpf.core.mapping_types import map_types_to_python from ansys.dpf.core.outputs import Output, _Outputs +T = TypeVar("T") -class Input: + +class Input(Generic[T]): """ Intermediate class internally instantiated by the :class:`ansys.dpf.core.dpf_operator.Operator`. @@ -70,16 +73,8 @@ def __init__(self, spec, pin, operator, count_ellipsis=-1): self.name += str(self._count_ellipsis + 1) self._update_doc_str(docstr, self.name) - def connect(self, inpt): - """Connect any input (entity or operator output) to a specified input pin of this operator. - - Parameters - ---------- - inpt : str, int, double, Field, FieldsContainer, Scoping, DataSources, MeshedRegion, Enum, - Output, Outputs, Operator, os.PathLike - Input of the operator. - - """ + def connect(self, inpt: T): + """Connect any input (entity or operator output) to a specified input pin of this operator.""" from pathlib import Path # always convert ranges to lists @@ -173,7 +168,7 @@ def connect(self, inpt): self.__inc_if_ellipsis() - def __call__(self, inpt): + def __call__(self, inpt: T): """Allow instances to be called like a function.""" self.connect(inpt) @@ -241,13 +236,6 @@ def connect(self, inpt): .. deprecated:: Deprecated in favor of explicit output-to-input connections. - - Parameters - ---------- - inpt : str, int, double, bool, list[int], list[float], Field, FieldsContainer, Scoping, Enum, - ScopingsContainer, MeshedRegion, MeshesContainer, DataSources, CyclicSupport, Outputs, os.PathLike # noqa: E501 - Input of the operator. - """ warnings.warn( message="Use explicit output-to-input connections.", category=DeprecationWarning diff --git a/src/ansys/dpf/core/operators/averaging/elemental_difference.py b/src/ansys/dpf/core/operators/averaging/elemental_difference.py index 78598ed60cb..7eb67b1a33a 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_difference.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_difference.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class elemental_difference(Operator): r"""Transforms an Elemental Nodal or Nodal field into an Elemental field. 
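A minimal usage sketch (illustrative only, not part of the patch, and assuming `Output` is made subscriptable elsewhere in the same changeset, as the generated modules below already annotate it as `Output[Field]`): with `Input` declared as `Generic[T]` and `connect(self, inpt: T)`, static type checkers and IDEs can see what each generated pin accepts, while runtime behavior is unchanged.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_difference()
>>> op.inputs.field.connect(dpf.Field())     # accepted: pin is Input[Field | FieldsContainer]
>>> op.inputs.through_layers.connect(True)   # accepted: pin is Input[bool]
>>> op.inputs.through_layers.connect("top")  # flagged by a type checker, still permitted at runtime

The already-deprecated `connect(inpt)` form shown above steers users toward explicit output-to-input connections instead, e.g. `other_op.inputs.field.connect(op.outputs.field)`, which the removed docstring listed as an accepted argument type.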
@@ -199,19 +206,25 @@ class InputsElementalDifference(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_difference._spec().inputs, op) - self._field = Input(elemental_difference._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + elemental_difference._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._mesh_scoping = Input(elemental_difference._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + elemental_difference._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._mesh = Input(elemental_difference._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + elemental_difference._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._through_layers = Input( + self._through_layers: Input[bool] = Input( elemental_difference._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._through_layers) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -232,7 +245,7 @@ def field(self) -> Input: return self._field @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. average only on these entities @@ -253,7 +266,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -272,7 +285,7 @@ def mesh(self) -> Input: return self._mesh @property - def through_layers(self) -> Input: + def through_layers(self) -> Input[bool]: r"""Allows to connect through_layers input to the operator. The maximum elemental difference is taken through the different shell layers if true (default is false). @@ -307,11 +320,13 @@ class OutputsElementalDifference(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_difference._spec().outputs, op) - self._field = Output(elemental_difference._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + elemental_difference._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/elemental_difference_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_difference_fc.py index 8b34334140f..d0645f1e125 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_difference_fc.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_difference_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class elemental_difference_fc(Operator): r"""Transforms an Elemental Nodal or Nodal field into an Elemental field. 
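A note on the import pattern repeated in every generated module below (descriptive sketch, adds no behavior): the new `Field`, `FieldsContainer`, `MeshedRegion`, `Scoping`, etc. imports sit under `if TYPE_CHECKING:`, so they are only evaluated by static analyzers; combined with the `from __future__ import annotations` already present at the top of each module, the annotations remain lazy strings at runtime, avoiding import cycles and leaving module import cost unchanged.

>>> from typing import TYPE_CHECKING
>>> TYPE_CHECKING  # False when the module actually runs; type checkers treat it as True
False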
@@ -205,21 +213,25 @@ class InputsElementalDifferenceFc(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_difference_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elemental_difference_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input(elemental_difference_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elemental_difference_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._scoping = Input(elemental_difference_fc._spec().input_pin(3), 3, op, -1) + self._scoping: Input[Scoping | ScopingsContainer] = Input( + elemental_difference_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._scoping) - self._collapse_shell_layers = Input( + self._collapse_shell_layers: Input[bool] = Input( elemental_difference_fc._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._collapse_shell_layers) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -238,7 +250,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. The mesh region in this pin is used to perform the averaging, used if there is no fields support. @@ -259,7 +271,7 @@ def mesh(self) -> Input: return self._mesh @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect scoping input to the operator. Average only on these elements. If it is scoping container, the label must correspond to the one of the fields container. @@ -280,7 +292,7 @@ def scoping(self) -> Input: return self._scoping @property - def collapse_shell_layers(self) -> Input: + def collapse_shell_layers(self) -> Input[bool]: r"""Allows to connect collapse_shell_layers input to the operator. If true, the data across different shell layers is averaged as well (default is false). 
@@ -315,13 +327,13 @@ class OutputsElementalDifferenceFc(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_difference_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elemental_difference_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py index c2f1387e2bd..94e7043ba1a 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class elemental_fraction_fc(Operator): r"""Transforms Elemental Nodal fields into Elemental fields. Each elemental @@ -213,23 +219,29 @@ class InputsElementalFractionFc(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_fraction_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elemental_fraction_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input(elemental_fraction_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion] = Input( + elemental_fraction_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._scoping = Input(elemental_fraction_fc._spec().input_pin(3), 3, op, -1) + self._scoping: Input[Scoping] = Input( + elemental_fraction_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._scoping) - self._denominator = Input(elemental_fraction_fc._spec().input_pin(6), 6, op, -1) + self._denominator: Input[FieldsContainer] = Input( + elemental_fraction_fc._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._denominator) - self._collapse_shell_layers = Input( + self._collapse_shell_layers: Input[bool] = Input( elemental_fraction_fc._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._collapse_shell_layers) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -248,7 +260,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. The mesh region in this pin is used to perform the averaging. It is used if there is no fields support. @@ -269,7 +281,7 @@ def mesh(self) -> Input: return self._mesh @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Average only on these elements. If it is a scoping container, the label must correspond to the one of the fields containers. 
@@ -290,7 +302,7 @@ def scoping(self) -> Input: return self._scoping @property - def denominator(self) -> Input: + def denominator(self) -> Input[FieldsContainer]: r"""Allows to connect denominator input to the operator. If a fields container is set in this pin, it is used as the denominator of the fraction instead of entity_average_fc. @@ -311,7 +323,7 @@ def denominator(self) -> Input: return self._denominator @property - def collapse_shell_layers(self) -> Input: + def collapse_shell_layers(self) -> Input[bool]: r"""Allows to connect collapse_shell_layers input to the operator. If true, the data across different shell layers is averaged as well (default is false). @@ -346,13 +358,13 @@ class OutputsElementalFractionFc(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_fraction_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elemental_fraction_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/elemental_mean.py b/src/ansys/dpf/core/operators/averaging/elemental_mean.py index 198df3d5491..86cc7891d18 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_mean.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_mean.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + class elemental_mean(Operator): r"""Computes the average of a multi-entity field, (ElementalNodal -> @@ -195,19 +200,25 @@ class InputsElementalMean(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_mean._spec().inputs, op) - self._field = Input(elemental_mean._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input( + elemental_mean._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._collapse_shell_layers = Input( + self._collapse_shell_layers: Input[bool] = Input( elemental_mean._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._collapse_shell_layers) - self._force_averaging = Input(elemental_mean._spec().input_pin(2), 2, op, -1) + self._force_averaging: Input[bool] = Input( + elemental_mean._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._force_averaging) - self._scoping = Input(elemental_mean._spec().input_pin(3), 3, op, -1) + self._scoping: Input[Scoping] = Input( + elemental_mean._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._scoping) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -226,7 +237,7 @@ def field(self) -> Input: return self._field @property - def collapse_shell_layers(self) -> Input: + def collapse_shell_layers(self) -> Input[bool]: r"""Allows to connect collapse_shell_layers input to the operator. If true, shell layers are averaged as well (default is false). 
@@ -247,7 +258,7 @@ def collapse_shell_layers(self) -> Input: return self._collapse_shell_layers @property - def force_averaging(self) -> Input: + def force_averaging(self) -> Input[bool]: r"""Allows to connect force_averaging input to the operator. If true you average, if false you just sum. @@ -268,7 +279,7 @@ def force_averaging(self) -> Input: return self._force_averaging @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Average only on these elements. If it is a scoping container, the label must correspond to the one of the fields containers. @@ -303,11 +314,11 @@ class OutputsElementalMean(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_mean._spec().outputs, op) - self._field = Output(elemental_mean._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(elemental_mean._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/elemental_mean_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_mean_fc.py index 4f44ba4bf73..c9c4bd05bf9 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_mean_fc.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_mean_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class elemental_mean_fc(Operator): r"""Computes the average of a multi-entity container of fields, @@ -257,31 +265,37 @@ class InputsElementalMeanFc(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_mean_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elemental_mean_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._collapse_shell_layers = Input( + self._collapse_shell_layers: Input[bool] = Input( elemental_mean_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._collapse_shell_layers) - self._force_averaging = Input(elemental_mean_fc._spec().input_pin(2), 2, op, -1) + self._force_averaging: Input[bool] = Input( + elemental_mean_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._force_averaging) - self._scoping = Input(elemental_mean_fc._spec().input_pin(3), 3, op, -1) + self._scoping: Input[Scoping | ScopingsContainer] = Input( + elemental_mean_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._scoping) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( elemental_mean_fc._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._merge_solid_shell = Input( + self._merge_solid_shell: Input[bool] = Input( elemental_mean_fc._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._merge_solid_shell) - self._e_shell_layer = Input(elemental_mean_fc._spec().input_pin(27), 27, 
op, -1) + self._e_shell_layer: Input[int] = Input( + elemental_mean_fc._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._e_shell_layer) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -300,7 +314,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def collapse_shell_layers(self) -> Input: + def collapse_shell_layers(self) -> Input[bool]: r"""Allows to connect collapse_shell_layers input to the operator. If true, the data across different shell layers is averaged as well (default is false). @@ -321,7 +335,7 @@ def collapse_shell_layers(self) -> Input: return self._collapse_shell_layers @property - def force_averaging(self) -> Input: + def force_averaging(self) -> Input[bool]: r"""Allows to connect force_averaging input to the operator. If true you average, if false you just sum. @@ -342,7 +356,7 @@ def force_averaging(self) -> Input: return self._force_averaging @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect scoping input to the operator. Average only on these elements. If it is a scoping container, the label must correspond to the one of the fields container. @@ -363,7 +377,7 @@ def scoping(self) -> Input: return self._scoping @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect abstract_meshed_region input to the operator. The mesh region in this pin is used to perform the averaging. It is used if there is no fields support. @@ -384,7 +398,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def merge_solid_shell(self) -> Input: + def merge_solid_shell(self) -> Input[bool]: r"""Allows to connect merge_solid_shell input to the operator. For shell/solid mixed fields, group in the same field all solids and shells (false by default). This pin only has an effect when collapse_shell_layers is false and a value for e_shell_layer is provided. @@ -405,7 +419,7 @@ def merge_solid_shell(self) -> Input: return self._merge_solid_shell @property - def e_shell_layer(self) -> Input: + def e_shell_layer(self) -> Input[int]: r"""Allows to connect e_shell_layer input to the operator. 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. This pin only has an effect when collapse_shell_layers is false. 
@@ -440,11 +454,13 @@ class OutputsElementalMeanFc(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_mean_fc._spec().outputs, op) - self._fields_container = Output(elemental_mean_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + elemental_mean_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py index 2cbae6d0111..fef119e32fe 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.property_field import PropertyField + from ansys.dpf.core.scoping import Scoping + class elemental_nodal_to_nodal(Operator): r"""Transforms an Elemental Nodal field into a Nodal field using an @@ -236,29 +244,33 @@ class InputsElementalNodalToNodal(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal._spec().inputs, op) - self._field = Input(elemental_nodal_to_nodal._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + elemental_nodal_to_nodal._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( elemental_nodal_to_nodal._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._should_average = Input( + self._should_average: Input[bool] = Input( elemental_nodal_to_nodal._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._should_average) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( elemental_nodal_to_nodal._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) - self._extend_weights_to_mid_nodes = Input( + self._extend_weights_to_mid_nodes: Input[bool] = Input( elemental_nodal_to_nodal._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._extend_weights_to_mid_nodes) - self._mesh = Input(elemental_nodal_to_nodal._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + elemental_nodal_to_nodal._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -279,7 +291,7 @@ def field(self) -> Input: return self._field @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. 
average only on these entities @@ -300,7 +312,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def should_average(self) -> Input: + def should_average(self) -> Input[bool]: r"""Allows to connect should_average input to the operator. Each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities). @@ -321,7 +333,7 @@ def should_average(self) -> Input: return self._should_average @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour primary nodes. @@ -342,7 +354,7 @@ def extend_to_mid_nodes(self) -> Input: return self._extend_to_mid_nodes @property - def extend_weights_to_mid_nodes(self) -> Input: + def extend_weights_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_weights_to_mid_nodes input to the operator. Extends weights to mid nodes (when available). Default is false. @@ -363,7 +375,7 @@ def extend_weights_to_mid_nodes(self) -> Input: return self._extend_weights_to_mid_nodes @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -397,13 +409,17 @@ class OutputsElementalNodalToNodal(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal._spec().outputs, op) - self._field = Output(elemental_nodal_to_nodal._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + elemental_nodal_to_nodal._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) - self._weight = Output(elemental_nodal_to_nodal._spec().output_pin(1), 1, op) + self._weight: Output[PropertyField] = Output( + elemental_nodal_to_nodal._spec().output_pin(1), 1, op + ) self._outputs.append(self._weight) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns @@ -421,7 +437,7 @@ def field(self) -> Output: return self._field @property - def weight(self) -> Output: + def weight(self) -> Output[PropertyField]: r"""Allows to get weight output of the operator Provides the number of times it was found in the elemental nodal field, for each node. Can be used to average later. diff --git a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py index 0900632b0b3..0564c833b3a 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class elemental_nodal_to_nodal_elemental(Operator): r"""Transforms an Elemental Nodal field to Nodal Elemental. 
The result is @@ -161,17 +167,17 @@ class InputsElementalNodalToNodalElemental(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_elemental._spec().inputs, op) - self._field = Input( + self._field: Input[Field | FieldsContainer] = Input( elemental_nodal_to_nodal_elemental._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._field) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( elemental_nodal_to_nodal_elemental._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -192,7 +198,7 @@ def field(self) -> Input: return self._field @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -225,13 +231,13 @@ class OutputsElementalNodalToNodalElemental(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_elemental._spec().outputs, op) - self._field = Output( + self._field: Output[Field] = Output( elemental_nodal_to_nodal_elemental._spec().output_pin(0), 0, op ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py index 9eae9fca6d2..6ffee3a2f36 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class elemental_nodal_to_nodal_elemental_fc(Operator): r"""Transforms Elemental Nodal fields to Nodal Elemental fields. The result @@ -162,17 +167,17 @@ class InputsElementalNodalToNodalElementalFc(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_elemental_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elemental_nodal_to_nodal_elemental_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( elemental_nodal_to_nodal_elemental_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -191,7 +196,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. 
Returns @@ -224,13 +229,13 @@ class OutputsElementalNodalToNodalElementalFc(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_elemental_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elemental_nodal_to_nodal_elemental_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py index bfa5cba3b14..3dcd316adda 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class elemental_nodal_to_nodal_fc(Operator): r"""Transforms Elemental Nodal fields into Nodal fields using an averaging @@ -45,7 +53,7 @@ class elemental_nodal_to_nodal_fc(Operator): ------- fields_container: FieldsContainer weights: Class Dataprocessing::Dpftypecollection<Class - Dataprocessing::Cpropertyfield> + Dataprocessing::Cpropertyfield> Gives for each node, the number of times it was found in the Elemental Nodal field. Can be used to average later. 
Examples @@ -281,39 +289,41 @@ class InputsElementalNodalToNodalFc(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elemental_nodal_to_nodal_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input(elemental_nodal_to_nodal_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elemental_nodal_to_nodal_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._should_average = Input( + self._should_average: Input[bool] = Input( elemental_nodal_to_nodal_fc._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._should_average) - self._scoping = Input( + self._scoping: Input[Scoping | ScopingsContainer] = Input( elemental_nodal_to_nodal_fc._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._scoping) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( elemental_nodal_to_nodal_fc._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) - self._extend_weights_to_mid_nodes = Input( + self._extend_weights_to_mid_nodes: Input[bool] = Input( elemental_nodal_to_nodal_fc._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._extend_weights_to_mid_nodes) - self._merge_solid_shell = Input( + self._merge_solid_shell: Input[bool] = Input( elemental_nodal_to_nodal_fc._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._merge_solid_shell) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( elemental_nodal_to_nodal_fc._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -332,7 +342,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. The mesh region in this pin is used to perform the averaging. It is used if there is no fields support. @@ -353,7 +363,7 @@ def mesh(self) -> Input: return self._mesh @property - def should_average(self) -> Input: + def should_average(self) -> Input[bool]: r"""Allows to connect should_average input to the operator. Each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities). @@ -374,7 +384,7 @@ def should_average(self) -> Input: return self._should_average @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect scoping input to the operator. Average only on these nodes. If it is a scoping container, the label must correspond to the one of the fields containers. @@ -395,7 +405,7 @@ def scoping(self) -> Input: return self._scoping @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour primary nodes. @@ -416,7 +426,7 @@ def extend_to_mid_nodes(self) -> Input: return self._extend_to_mid_nodes @property - def extend_weights_to_mid_nodes(self) -> Input: + def extend_weights_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_weights_to_mid_nodes input to the operator. 
Extends weights to mid nodes (when available). Default is false. @@ -437,7 +447,7 @@ def extend_weights_to_mid_nodes(self) -> Input: return self._extend_weights_to_mid_nodes @property - def merge_solid_shell(self) -> Input: + def merge_solid_shell(self) -> Input[bool]: r"""Allows to connect merge_solid_shell input to the operator. For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified. @@ -458,7 +468,7 @@ def merge_solid_shell(self) -> Input: return self._merge_solid_shell @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid). @@ -494,15 +504,17 @@ class OutputsElementalNodalToNodalFc(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elemental_nodal_to_nodal_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) - self._weights = Output(elemental_nodal_to_nodal_fc._spec().output_pin(1), 1, op) + self._weights: Output = Output( + elemental_nodal_to_nodal_fc._spec().output_pin(1), 1, op + ) self._outputs.append(self._weights) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal.py b/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal.py index f16dbdd943b..9f5c2c533e0 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class elemental_to_elemental_nodal(Operator): r"""Transforms an Elemental field to an Elemental Nodal field. @@ -176,19 +183,21 @@ class InputsElementalToElementalNodal(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_to_elemental_nodal._spec().inputs, op) - self._field = Input( + self._field: Input[Field | FieldsContainer] = Input( elemental_to_elemental_nodal._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._field) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( elemental_to_elemental_nodal._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._mesh = Input(elemental_to_elemental_nodal._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + elemental_to_elemental_nodal._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. 
field or fields container with only one field is expected @@ -209,7 +218,7 @@ def field(self) -> Input: return self._field @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. average only on these entities @@ -230,7 +239,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -263,11 +272,13 @@ class OutputsElementalToElementalNodal(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_to_elemental_nodal._spec().outputs, op) - self._field = Output(elemental_to_elemental_nodal._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + elemental_to_elemental_nodal._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal_fc.py index 55d5ab80d20..cc5d7f5c5b7 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal_fc.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class elemental_to_elemental_nodal_fc(Operator): r"""Transforms Elemental field to Elemental Nodal field. @@ -179,21 +185,21 @@ class InputsElementalToElementalNodalFc(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_to_elemental_nodal_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elemental_to_elemental_nodal_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input( + self._mesh: Input[MeshedRegion] = Input( elemental_to_elemental_nodal_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( elemental_to_elemental_nodal_fc._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._mesh_scoping) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -212,7 +218,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -231,7 +237,7 @@ def mesh(self) -> Input: return self._mesh @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. 
Returns @@ -264,13 +270,13 @@ class OutputsElementalToElementalNodalFc(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_to_elemental_nodal_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elemental_to_elemental_nodal_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/elemental_to_nodal.py b/src/ansys/dpf/core/operators/averaging/elemental_to_nodal.py index baffe12e232..fab9859e215 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_to_nodal.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_to_nodal.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class elemental_to_nodal(Operator): r"""Transforms an Elemental field to a Nodal field. The result is computed @@ -229,19 +235,25 @@ class InputsElementalToNodal(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_to_nodal._spec().inputs, op) - self._field = Input(elemental_to_nodal._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + elemental_to_nodal._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._mesh_scoping = Input(elemental_to_nodal._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + elemental_to_nodal._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._force_averaging = Input( + self._force_averaging: Input[int] = Input( elemental_to_nodal._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._force_averaging) - self._algorithm = Input(elemental_to_nodal._spec().input_pin(200), 200, op, -1) + self._algorithm: Input[int] = Input( + elemental_to_nodal._spec().input_pin(200), 200, op, -1 + ) self._inputs.append(self._algorithm) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -262,7 +274,7 @@ def field(self) -> Input: return self._field @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -281,7 +293,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def force_averaging(self) -> Input: + def force_averaging(self) -> Input[int]: r"""Allows to connect force_averaging input to the operator. Averaging on nodes is used if this pin is set to 1 (default is 1 for integrated results and 0 for discrete ones). @@ -302,7 +314,7 @@ def force_averaging(self) -> Input: return self._force_averaging @property - def algorithm(self) -> Input: + def algorithm(self) -> Input[int]: r"""Allows to connect algorithm input to the operator. Forces the usage of algorithm 1, 2 or 3 (default is chosen based on the type of mesh). 
@@ -337,11 +349,13 @@ class OutputsElementalToNodal(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_to_nodal._spec().outputs, op) - self._field = Output(elemental_to_nodal._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + elemental_to_nodal._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py index 76e1344ab9f..da268563078 100644 --- a/src/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py +++ b/src/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class elemental_to_nodal_fc(Operator): r"""Transforms Elemental Nodal fields to Nodal fields. The result is @@ -239,27 +247,29 @@ class InputsElementalToNodalFc(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_to_nodal_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elemental_to_nodal_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input(elemental_to_nodal_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elemental_to_nodal_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._force_averaging = Input( + self._force_averaging: Input[int] = Input( elemental_to_nodal_fc._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._force_averaging) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping | ScopingsContainer] = Input( elemental_to_nodal_fc._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._mesh_scoping) - self._algorithm = Input( + self._algorithm: Input[int] = Input( elemental_to_nodal_fc._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._algorithm) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -278,7 +288,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. Returns @@ -297,7 +307,7 @@ def mesh(self) -> Input: return self._mesh @property - def force_averaging(self) -> Input: + def force_averaging(self) -> Input[int]: r"""Allows to connect force_averaging input to the operator. Averaging on nodes is used if this pin is set to 1 (default is 1 for integrated results and 0 for discrete ones). 
@@ -318,7 +328,7 @@ def force_averaging(self) -> Input: return self._force_averaging @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -337,7 +347,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def algorithm(self) -> Input: + def algorithm(self) -> Input[int]: r"""Allows to connect algorithm input to the operator. Forces the usage of algorithm 1, 2 or 3 (default is chosen based on the type of mesh). @@ -372,13 +382,13 @@ class OutputsElementalToNodalFc(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_to_nodal_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elemental_to_nodal_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py b/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py index 63472439e08..d41dd95e0c0 100644 --- a/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py +++ b/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class extend_to_mid_nodes(Operator): r"""Extends an Elemental Nodal or Nodal field defined on corner nodes to a @@ -157,13 +163,17 @@ class InputsExtendToMidNodes(_Inputs): def __init__(self, op: Operator): super().__init__(extend_to_mid_nodes._spec().inputs, op) - self._field = Input(extend_to_mid_nodes._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + extend_to_mid_nodes._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._mesh = Input(extend_to_mid_nodes._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + extend_to_mid_nodes._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -184,7 +194,7 @@ def field(self) -> Input: return self._field @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. 
Returns @@ -217,11 +227,13 @@ class OutputsExtendToMidNodes(_Outputs): def __init__(self, op: Operator): super().__init__(extend_to_mid_nodes._spec().outputs, op) - self._field = Output(extend_to_mid_nodes._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + extend_to_mid_nodes._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py b/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py index 9b234ea5d09..38026898afd 100644 --- a/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py +++ b/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class extend_to_mid_nodes_fc(Operator): r"""Extends Elemental Nodal or Nodal fields defined on corner nodes to @@ -157,15 +162,17 @@ class InputsExtendToMidNodesFc(_Inputs): def __init__(self, op: Operator): super().__init__(extend_to_mid_nodes_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( extend_to_mid_nodes_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input(extend_to_mid_nodes_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion] = Input( + extend_to_mid_nodes_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -184,7 +191,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. The mesh region in this pin is used to perform the averaging. It is used if there is no fields support. 
@@ -219,13 +226,13 @@ class OutputsExtendToMidNodesFc(_Outputs): def __init__(self, op: Operator): super().__init__(extend_to_mid_nodes_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( extend_to_mid_nodes_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/force_summation.py b/src/ansys/dpf/core/operators/averaging/force_summation.py index 785732fe604..d0ab3b6b76a 100644 --- a/src/ansys/dpf/core/operators/averaging/force_summation.py +++ b/src/ansys/dpf/core/operators/averaging/force_summation.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class force_summation(Operator): r"""Computes the sum of elemental forces contribution on a set of nodes in @@ -288,23 +297,37 @@ class InputsForceSummation(_Inputs): def __init__(self, op: Operator): super().__init__(force_summation._spec().inputs, op) - self._time_scoping = Input(force_summation._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping] = Input( + force_summation._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._nodal_scoping = Input(force_summation._spec().input_pin(1), 1, op, -1) + self._nodal_scoping: Input[Scoping | ScopingsContainer] = Input( + force_summation._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._nodal_scoping) - self._elemental_scoping = Input(force_summation._spec().input_pin(2), 2, op, -1) + self._elemental_scoping: Input[Scoping | ScopingsContainer] = Input( + force_summation._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._elemental_scoping) - self._streams_container = Input(force_summation._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + force_summation._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(force_summation._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + force_summation._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._force_type = Input(force_summation._spec().input_pin(5), 5, op, -1) + self._force_type: Input[int] = Input( + force_summation._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._force_type) - self._spoint = Input(force_summation._spec().input_pin(6), 6, op, -1) + self._spoint: Input[Field | FieldsContainer] = Input( + force_summation._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._spoint) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. 
default = all time steps @@ -325,7 +348,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def nodal_scoping(self) -> Input: + def nodal_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect nodal_scoping input to the operator. Nodal Scoping or Scopings Container with a single label. Set of nodes in which elemental contribution forces will be accumulated (default = all nodes) @@ -346,7 +369,7 @@ def nodal_scoping(self) -> Input: return self._nodal_scoping @property - def elemental_scoping(self) -> Input: + def elemental_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect elemental_scoping input to the operator. Elemental Scoping or Scopings Container with a single label. Set of elements contributing to the force calculation. (default = all elements) @@ -367,7 +390,7 @@ def elemental_scoping(self) -> Input: return self._elemental_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Streams container. Optional if using data sources. @@ -388,7 +411,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Data sources. Optional if using a streams container. @@ -409,7 +432,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def force_type(self) -> Input: + def force_type(self) -> Input[int]: r"""Allows to connect force_type input to the operator. Type of force to be processed (0: Total forces (static, damping, and inertia)., 1 (default): Static forces, 2: Damping forces, 3: Inertia forces) @@ -430,7 +453,7 @@ def force_type(self) -> Input: return self._force_type @property - def spoint(self) -> Input: + def spoint(self) -> Input[Field | FieldsContainer]: r"""Allows to connect spoint input to the operator. Field or fields container of the coordinates of the point used for moment summations. Defaults to (0,0,0). 
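
As a usage sketch of the force_summation pins annotated above (assumes a local DPF server; rst_path is a placeholder path to an MAPDL result file), the DataSources and int pins now carry their types through to static tooling, and the accumulated forces come back as a FieldsContainer per the output annotations further down:

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

ds = dpf.DataSources(rst_path)            # rst_path: placeholder
op = ops.averaging.force_summation()
op.inputs.data_sources.connect(ds)        # pin typed Input[DataSources]
op.inputs.force_type.connect(1)           # pin typed Input[int]; 1 = static forces per the pin doc above
forces = op.outputs.force_accumulation()  # evaluates the operator
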
@@ -470,21 +493,33 @@ class OutputsForceSummation(_Outputs): def __init__(self, op: Operator): super().__init__(force_summation._spec().outputs, op) - self._force_accumulation = Output(force_summation._spec().output_pin(0), 0, op) + self._force_accumulation: Output[FieldsContainer] = Output( + force_summation._spec().output_pin(0), 0, op + ) self._outputs.append(self._force_accumulation) - self._moment_accumulation = Output(force_summation._spec().output_pin(1), 1, op) + self._moment_accumulation: Output[FieldsContainer] = Output( + force_summation._spec().output_pin(1), 1, op + ) self._outputs.append(self._moment_accumulation) - self._heat_accumulation = Output(force_summation._spec().output_pin(2), 2, op) + self._heat_accumulation: Output[FieldsContainer] = Output( + force_summation._spec().output_pin(2), 2, op + ) self._outputs.append(self._heat_accumulation) - self._forces_on_nodes = Output(force_summation._spec().output_pin(10), 10, op) + self._forces_on_nodes: Output[FieldsContainer] = Output( + force_summation._spec().output_pin(10), 10, op + ) self._outputs.append(self._forces_on_nodes) - self._moments_on_nodes = Output(force_summation._spec().output_pin(11), 11, op) + self._moments_on_nodes: Output[FieldsContainer] = Output( + force_summation._spec().output_pin(11), 11, op + ) self._outputs.append(self._moments_on_nodes) - self._heats_on_nodes = Output(force_summation._spec().output_pin(12), 12, op) + self._heats_on_nodes: Output[FieldsContainer] = Output( + force_summation._spec().output_pin(12), 12, op + ) self._outputs.append(self._heats_on_nodes) @property - def force_accumulation(self) -> Output: + def force_accumulation(self) -> Output[FieldsContainer]: r"""Allows to get force_accumulation output of the operator Returns @@ -502,7 +537,7 @@ def force_accumulation(self) -> Output: return self._force_accumulation @property - def moment_accumulation(self) -> Output: + def moment_accumulation(self) -> Output[FieldsContainer]: r"""Allows to get moment_accumulation output of the operator Returns @@ -520,7 +555,7 @@ def moment_accumulation(self) -> Output: return self._moment_accumulation @property - def heat_accumulation(self) -> Output: + def heat_accumulation(self) -> Output[FieldsContainer]: r"""Allows to get heat_accumulation output of the operator Returns @@ -538,7 +573,7 @@ def heat_accumulation(self) -> Output: return self._heat_accumulation @property - def forces_on_nodes(self) -> Output: + def forces_on_nodes(self) -> Output[FieldsContainer]: r"""Allows to get forces_on_nodes output of the operator Returns @@ -556,7 +591,7 @@ def forces_on_nodes(self) -> Output: return self._forces_on_nodes @property - def moments_on_nodes(self) -> Output: + def moments_on_nodes(self) -> Output[FieldsContainer]: r"""Allows to get moments_on_nodes output of the operator Returns @@ -574,7 +609,7 @@ def moments_on_nodes(self) -> Output: return self._moments_on_nodes @property - def heats_on_nodes(self) -> Output: + def heats_on_nodes(self) -> Output[FieldsContainer]: r"""Allows to get heats_on_nodes output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/force_summation_psd.py b/src/ansys/dpf/core/operators/averaging/force_summation_psd.py index 2d2f25ad5bd..972208146f5 100644 --- a/src/ansys/dpf/core/operators/averaging/force_summation_psd.py +++ b/src/ansys/dpf/core/operators/averaging/force_summation_psd.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import 
Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class force_summation_psd(Operator): r"""Computes the sum of elemental forces contribution on a set of nodes in @@ -274,25 +283,37 @@ class InputsForceSummationPsd(_Inputs): def __init__(self, op: Operator): super().__init__(force_summation_psd._spec().inputs, op) - self._nodal_scoping = Input(force_summation_psd._spec().input_pin(1), 1, op, -1) + self._nodal_scoping: Input[Scoping | ScopingsContainer] = Input( + force_summation_psd._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._nodal_scoping) - self._elemental_scoping = Input( + self._elemental_scoping: Input[Scoping | ScopingsContainer] = Input( force_summation_psd._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._elemental_scoping) - self._streams = Input(force_summation_psd._spec().input_pin(3), 3, op, -1) + self._streams: Input[StreamsContainer] = Input( + force_summation_psd._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams) - self._data_sources = Input(force_summation_psd._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + force_summation_psd._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._spoint = Input(force_summation_psd._spec().input_pin(6), 6, op, -1) + self._spoint: Input[Field | FieldsContainer] = Input( + force_summation_psd._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._spoint) - self._abs_rel_key = Input(force_summation_psd._spec().input_pin(7), 7, op, -1) + self._abs_rel_key: Input[int] = Input( + force_summation_psd._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._abs_rel_key) - self._signif = Input(force_summation_psd._spec().input_pin(8), 8, op, -1) + self._signif: Input[float] = Input( + force_summation_psd._spec().input_pin(8), 8, op, -1 + ) self._inputs.append(self._signif) @property - def nodal_scoping(self) -> Input: + def nodal_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect nodal_scoping input to the operator. Nodal scoping or scopings container with a single label. Set of nodes in which elemental contribution forces will be accumulated. Defaults to all nodes. @@ -313,7 +334,7 @@ def nodal_scoping(self) -> Input: return self._nodal_scoping @property - def elemental_scoping(self) -> Input: + def elemental_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect elemental_scoping input to the operator. Elemental scoping or scopings container with a single label. Set of elements contributing to the force calculation. Defaults to all elements. @@ -334,7 +355,7 @@ def elemental_scoping(self) -> Input: return self._elemental_scoping @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. Streams container for RST and PSD files (optional if using data sources). The operator supports both a single RST file and two separate RST files. See data sources pin specifications for details on how to define the streams for both cases. 
@@ -355,7 +376,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Data sources containing RST and PSD files (optional if using a streams container). The operator supports both a single RST file (containing both modal and PSD results) and two separate RST files (one for modal and one for PSD analyses).The data source containing modal results must be defined as an upstream data source.If using a single RST file for PSD and modal analysis, the RST file must be in an upstream data source.If using two separate RST files, only the modal RST must be in an upstream data source. @@ -376,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def spoint(self) -> Input: + def spoint(self) -> Input[Field | FieldsContainer]: r"""Allows to connect spoint input to the operator. Field or fields container of the coordinates of the point used for moment summations. Defaults to (0,0,0). @@ -397,7 +418,7 @@ def spoint(self) -> Input: return self._spoint @property - def abs_rel_key(self) -> Input: + def abs_rel_key(self) -> Input[int]: r"""Allows to connect abs_rel_key input to the operator. Key to select the type of response: 0 for relative response (default) or 1 for absolute response. @@ -418,7 +439,7 @@ def abs_rel_key(self) -> Input: return self._abs_rel_key @property - def signif(self) -> Input: + def signif(self) -> Input[float]: r"""Allows to connect signif input to the operator. Significance threshold, defaults to 0.0001. Any mode with a significance level above this value will be included in the combination.The significance level is defined as the modal covariance matrix term, divided by the maximum modal covariance matrix term. @@ -456,25 +477,25 @@ class OutputsForceSummationPsd(_Outputs): def __init__(self, op: Operator): super().__init__(force_summation_psd._spec().outputs, op) - self._force_accumulation = Output( + self._force_accumulation: Output[FieldsContainer] = Output( force_summation_psd._spec().output_pin(0), 0, op ) self._outputs.append(self._force_accumulation) - self._moment_accumulation = Output( + self._moment_accumulation: Output[FieldsContainer] = Output( force_summation_psd._spec().output_pin(1), 1, op ) self._outputs.append(self._moment_accumulation) - self._forces_on_nodes = Output( + self._forces_on_nodes: Output[FieldsContainer] = Output( force_summation_psd._spec().output_pin(10), 10, op ) self._outputs.append(self._forces_on_nodes) - self._moments_on_nodes = Output( + self._moments_on_nodes: Output[FieldsContainer] = Output( force_summation_psd._spec().output_pin(11), 11, op ) self._outputs.append(self._moments_on_nodes) @property - def force_accumulation(self) -> Output: + def force_accumulation(self) -> Output[FieldsContainer]: r"""Allows to get force_accumulation output of the operator Returns the sum of forces for the 1-sigma displacement solution on the scoped nodes/elements. @@ -494,7 +515,7 @@ def force_accumulation(self) -> Output: return self._force_accumulation @property - def moment_accumulation(self) -> Output: + def moment_accumulation(self) -> Output[FieldsContainer]: r"""Allows to get moment_accumulation output of the operator Returns the sum of moments for the 1-sigma displacement solution on the scoped nodes/elements. 
@@ -514,7 +535,7 @@ def moment_accumulation(self) -> Output: return self._moment_accumulation @property - def forces_on_nodes(self) -> Output: + def forces_on_nodes(self) -> Output[FieldsContainer]: r"""Allows to get forces_on_nodes output of the operator Returns the nodal forces for the 1-sigma displacement solution on the scoped nodes/elements. @@ -534,7 +555,7 @@ def forces_on_nodes(self) -> Output: return self._forces_on_nodes @property - def moments_on_nodes(self) -> Output: + def moments_on_nodes(self) -> Output[FieldsContainer]: r"""Allows to get moments_on_nodes output of the operator Returns the nodal moments for the 1-sigma displacement solution on the scoped nodes/elements. diff --git a/src/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py b/src/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py index d2efe05a5d3..1d67671b355 100644 --- a/src/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py +++ b/src/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + class gauss_to_node_fc(Operator): r"""Extrapolates results available at Gauss or quadrature points to nodal @@ -178,15 +185,21 @@ class InputsGaussToNodeFc(_Inputs): def __init__(self, op: Operator): super().__init__(gauss_to_node_fc._spec().inputs, op) - self._fields_container = Input(gauss_to_node_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + gauss_to_node_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(gauss_to_node_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gauss_to_node_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._scoping = Input(gauss_to_node_fc._spec().input_pin(3), 3, op, -1) + self._scoping: Input[Scoping] = Input( + gauss_to_node_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._scoping) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -205,7 +218,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. The mesh region in this pin is used for extrapolating results available at Gauss or quadrature points to nodal points. @@ -226,7 +239,7 @@ def mesh(self) -> Input: return self._mesh @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Extrapolating results on the selected scoping. If it is a scoping container, the label must correspond to the one of the fields containers. 
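
One practical effect of the Input[...] parametrization, sketched here with gauss_to_node_fc (my_fc and my_mesh are placeholder objects; nothing in this sketch is part of the generated code): depending on how the checker resolves the generic pin parameter, a static tool such as mypy or pyright can flag a value that does not match the declared pin type, while runtime behaviour is unchanged.

from ansys.dpf.core import operators as ops

op = ops.averaging.gauss_to_node_fc()
op.inputs.fields_container.connect(my_fc)  # OK: matches Input[FieldsContainer]
op.inputs.mesh.connect(my_mesh)            # OK: MeshedRegion | MeshesContainer accepted
op.inputs.scoping.connect("element_ids")   # likely reported by a type checker: str is not Scoping
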
@@ -261,11 +274,13 @@ class OutputsGaussToNodeFc(_Outputs): def __init__(self, op: Operator): super().__init__(gauss_to_node_fc._spec().outputs, op) - self._fields_container = Output(gauss_to_node_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + gauss_to_node_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/nodal_difference.py b/src/ansys/dpf/core/operators/averaging/nodal_difference.py index 1bc0f857837..ff1bf199c94 100644 --- a/src/ansys/dpf/core/operators/averaging/nodal_difference.py +++ b/src/ansys/dpf/core/operators/averaging/nodal_difference.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class nodal_difference(Operator): r"""Transforms an Elemental Nodal field into a Nodal field. Each nodal value @@ -178,15 +185,21 @@ class InputsNodalDifference(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_difference._spec().inputs, op) - self._field = Input(nodal_difference._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + nodal_difference._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._mesh_scoping = Input(nodal_difference._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + nodal_difference._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._mesh = Input(nodal_difference._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + nodal_difference._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -207,7 +220,7 @@ def field(self) -> Input: return self._field @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. average only on these entities @@ -228,7 +241,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. 
Returns @@ -261,11 +274,13 @@ class OutputsNodalDifference(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_difference._spec().outputs, op) - self._field = Output(nodal_difference._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + nodal_difference._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/nodal_difference_fc.py b/src/ansys/dpf/core/operators/averaging/nodal_difference_fc.py index 01f1bb15142..a00f9adc6b9 100644 --- a/src/ansys/dpf/core/operators/averaging/nodal_difference_fc.py +++ b/src/ansys/dpf/core/operators/averaging/nodal_difference_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class nodal_difference_fc(Operator): r"""Transforms Elemental Nodal fields into Nodal fields. Each nodal value is @@ -182,17 +190,21 @@ class InputsNodalDifferenceFc(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_difference_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_difference_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input(nodal_difference_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_difference_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._scoping = Input(nodal_difference_fc._spec().input_pin(3), 3, op, -1) + self._scoping: Input[Scoping | ScopingsContainer] = Input( + nodal_difference_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._scoping) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -211,7 +223,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. The mesh region in this pin is used to perform the averaging. It is used if there is no fields support. @@ -232,7 +244,7 @@ def mesh(self) -> Input: return self._mesh @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect scoping input to the operator. Average only on these nodes. If it is a scoping container, the label must correspond to the one of the fields containers. 
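
The import blocks added at the top of each generated module follow the standard typing-only pattern: the classes are needed solely for annotations, so they sit under if TYPE_CHECKING and, combined with from __future__ import annotations, are never evaluated at runtime, adding no new runtime imports to the generated modules. A generic sketch of the pattern (the helper function is illustrative, not part of the codebase):

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # resolved by static tools only; no import happens at runtime
    from ansys.dpf.core.fields_container import FieldsContainer


def summarize(fc: FieldsContainer) -> str:
    """Illustrative helper: the annotation above is a plain string at runtime."""
    return f"FieldsContainer with {len(fc)} field(s)"
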
@@ -267,13 +279,13 @@ class OutputsNodalDifferenceFc(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_difference_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( nodal_difference_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py b/src/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py index 1fed6f0c89a..cc9ca6a7a6d 100644 --- a/src/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py +++ b/src/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class nodal_fraction_fc(Operator): r"""Transforms Elemental Nodal fields into Nodal fields. Each nodal value is @@ -197,19 +203,25 @@ class InputsNodalFractionFc(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_fraction_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_fraction_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input(nodal_fraction_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion] = Input( + nodal_fraction_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._scoping = Input(nodal_fraction_fc._spec().input_pin(3), 3, op, -1) + self._scoping: Input[Scoping] = Input( + nodal_fraction_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._scoping) - self._denominator = Input(nodal_fraction_fc._spec().input_pin(6), 6, op, -1) + self._denominator: Input[FieldsContainer] = Input( + nodal_fraction_fc._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._denominator) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -228,7 +240,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. The mesh region in this pin is used to perform the averaging. It is used if there is no fields support. @@ -249,7 +261,7 @@ def mesh(self) -> Input: return self._mesh @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Average only on these nodes. If it is a scoping container, the label must correspond to the one of the fields containers. @@ -270,7 +282,7 @@ def scoping(self) -> Input: return self._scoping @property - def denominator(self) -> Input: + def denominator(self) -> Input[FieldsContainer]: r"""Allows to connect denominator input to the operator. If a fields container is set in this pin, it is used as the denominator of the fraction instead of elemental_nodal_To_nodal_fc. 
@@ -305,11 +317,13 @@ class OutputsNodalFractionFc(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_fraction_fc._spec().outputs, op) - self._fields_container = Output(nodal_fraction_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + nodal_fraction_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental.py b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental.py index 71493922fe5..46319c01d09 100644 --- a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental.py +++ b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class nodal_to_elemental(Operator): r"""Transforms a Nodal field to an Elemental field, The result is computed @@ -179,17 +185,21 @@ class InputsNodalToElemental(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_to_elemental._spec().inputs, op) - self._field = Input(nodal_to_elemental._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + nodal_to_elemental._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._mesh_scoping = Input(nodal_to_elemental._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + nodal_to_elemental._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._collapse_shell_layers = Input( + self._collapse_shell_layers: Input[bool] = Input( nodal_to_elemental._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._collapse_shell_layers) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -210,7 +220,7 @@ def field(self) -> Input: return self._field @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -229,7 +239,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def collapse_shell_layers(self) -> Input: + def collapse_shell_layers(self) -> Input[bool]: r"""Allows to connect collapse_shell_layers input to the operator. If true, the data across different shell layers is averaged as well (default is false). 
@@ -264,11 +274,13 @@ class OutputsNodalToElemental(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_to_elemental._spec().outputs, op) - self._field = Output(nodal_to_elemental._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + nodal_to_elemental._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py index 3e125031e63..7ddcbd190b0 100644 --- a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py +++ b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class nodal_to_elemental_fc(Operator): r"""Transforms Nodal fields into Elemental fields using an averaging @@ -233,29 +241,33 @@ class InputsNodalToElementalFc(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_to_elemental_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_to_elemental_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input(nodal_to_elemental_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_to_elemental_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._scoping = Input(nodal_to_elemental_fc._spec().input_pin(3), 3, op, -1) + self._scoping: Input[Scoping | ScopingsContainer] = Input( + nodal_to_elemental_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._scoping) - self._collapse_shell_layers = Input( + self._collapse_shell_layers: Input[bool] = Input( nodal_to_elemental_fc._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._collapse_shell_layers) - self._merge_solid_shell = Input( + self._merge_solid_shell: Input[bool] = Input( nodal_to_elemental_fc._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._merge_solid_shell) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( nodal_to_elemental_fc._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -274,7 +286,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. The mesh region in this pin is used to perform the averaging. It is used if there is no fields support. 
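
For the scoping-typed pins above, a short sketch of restricting nodal_to_elemental_fc to a handful of elements (assumes a running DPF server; nodal_fc is a placeholder FieldsContainer and the element ids are arbitrary sample values):

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

elem_scoping = dpf.Scoping()
elem_scoping.location = dpf.locations.elemental
elem_scoping.ids = [1, 2, 3]

op = ops.averaging.nodal_to_elemental_fc()
op.inputs.fields_container.connect(nodal_fc)   # pin typed Input[FieldsContainer]
op.inputs.scoping.connect(elem_scoping)        # pin typed Input[Scoping | ScopingsContainer]
op.inputs.collapse_shell_layers.connect(True)  # pin typed Input[bool]
averaged = op.outputs.fields_container()       # evaluates the operator
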
@@ -295,7 +307,7 @@ def mesh(self) -> Input: return self._mesh @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect scoping input to the operator. Average only on these elements. If it is a scoping container, the label must correspond to the one of the fields containers. @@ -316,7 +328,7 @@ def scoping(self) -> Input: return self._scoping @property - def collapse_shell_layers(self) -> Input: + def collapse_shell_layers(self) -> Input[bool]: r"""Allows to connect collapse_shell_layers input to the operator. If true, the data across different shell layers is averaged as well (default is false). @@ -337,7 +349,7 @@ def collapse_shell_layers(self) -> Input: return self._collapse_shell_layers @property - def merge_solid_shell(self) -> Input: + def merge_solid_shell(self) -> Input[bool]: r"""Allows to connect merge_solid_shell input to the operator. For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true and collapse_shell_layers is false, a shell_layer needs to be specified. @@ -358,7 +370,7 @@ def merge_solid_shell(self) -> Input: return self._merge_solid_shell @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid). @@ -393,13 +405,13 @@ class OutputsNodalToElementalFc(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_to_elemental_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( nodal_to_elemental_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal.py b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal.py index d6596251a60..16f83b2c0e5 100644 --- a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal.py +++ b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class nodal_to_elemental_nodal(Operator): r"""Transforms a Nodal field to an ElementalNodal field, The result is @@ -179,19 +185,21 @@ class InputsNodalToElementalNodal(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_to_elemental_nodal._spec().inputs, op) - self._field = Input(nodal_to_elemental_nodal._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + nodal_to_elemental_nodal._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( nodal_to_elemental_nodal._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._collapse_shell_layers = Input( + 
self._collapse_shell_layers: Input[bool] = Input( nodal_to_elemental_nodal._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._collapse_shell_layers) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -212,7 +220,7 @@ def field(self) -> Input: return self._field @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -231,7 +239,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def collapse_shell_layers(self) -> Input: + def collapse_shell_layers(self) -> Input[bool]: r"""Allows to connect collapse_shell_layers input to the operator. If true, the data across different shell layers is averaged as well (default is false). @@ -266,11 +274,13 @@ class OutputsNodalToElementalNodal(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_to_elemental_nodal._spec().outputs, op) - self._field = Output(nodal_to_elemental_nodal._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + nodal_to_elemental_nodal._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal_fc.py index 2b1f7f69879..01d44496690 100644 --- a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal_fc.py +++ b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class nodal_to_elemental_nodal_fc(Operator): r"""Transforms Nodal fields_container to Elemental Nodal fields_container. @@ -179,19 +185,21 @@ class InputsNodalToElementalNodalFc(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_to_elemental_nodal_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_to_elemental_nodal_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input(nodal_to_elemental_nodal_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion] = Input( + nodal_to_elemental_nodal_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( nodal_to_elemental_nodal_fc._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._mesh_scoping) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -210,7 +218,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. 
Returns @@ -229,7 +237,7 @@ def mesh(self) -> Input: return self._mesh @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -262,13 +270,13 @@ class OutputsNodalToElementalNodalFc(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_to_elemental_nodal_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( nodal_to_elemental_nodal_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/to_elemental_fc.py b/src/ansys/dpf/core/operators/averaging/to_elemental_fc.py index 4f5fde6559e..8190f6e317d 100644 --- a/src/ansys/dpf/core/operators/averaging/to_elemental_fc.py +++ b/src/ansys/dpf/core/operators/averaging/to_elemental_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class to_elemental_fc(Operator): r"""Transforms Input fields into Elemental fields using an averaging @@ -241,27 +247,37 @@ class InputsToElementalFc(_Inputs): def __init__(self, op: Operator): super().__init__(to_elemental_fc._spec().inputs, op) - self._fields_container = Input(to_elemental_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + to_elemental_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(to_elemental_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion] = Input( + to_elemental_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._mesh_scoping = Input(to_elemental_fc._spec().input_pin(3), 3, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + to_elemental_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._smoothen_values = Input(to_elemental_fc._spec().input_pin(7), 7, op, -1) + self._smoothen_values: Input[bool] = Input( + to_elemental_fc._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._smoothen_values) - self._collapse_shell_layers = Input( + self._collapse_shell_layers: Input[bool] = Input( to_elemental_fc._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._collapse_shell_layers) - self._merge_solid_shell = Input( + self._merge_solid_shell: Input[bool] = Input( to_elemental_fc._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._merge_solid_shell) - self._shell_layer = Input(to_elemental_fc._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + to_elemental_fc._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -280,7 +296,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -299,7 +315,7 @@ def mesh(self) -> Input: return self._mesh @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -318,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def smoothen_values(self) -> Input: + def smoothen_values(self) -> Input[bool]: r"""Allows to connect smoothen_values input to the operator. if it is set to true, Elemental Nodal fields are first averaged on nodes and then averaged on elements (default is false). @@ -339,7 +355,7 @@ def smoothen_values(self) -> Input: return self._smoothen_values @property - def collapse_shell_layers(self) -> Input: + def collapse_shell_layers(self) -> Input[bool]: r"""Allows to connect collapse_shell_layers input to the operator. If true, the data across different shell layers is averaged as well (default is false). @@ -360,7 +376,7 @@ def collapse_shell_layers(self) -> Input: return self._collapse_shell_layers @property - def merge_solid_shell(self) -> Input: + def merge_solid_shell(self) -> Input[bool]: r"""Allows to connect merge_solid_shell input to the operator. For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true and collapse_shell_layers is false, a shell_layer needs to be specified. @@ -381,7 +397,7 @@ def merge_solid_shell(self) -> Input: return self._merge_solid_shell @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid). 
@@ -416,11 +432,13 @@ class OutputsToElementalFc(_Outputs): def __init__(self, op: Operator): super().__init__(to_elemental_fc._spec().outputs, op) - self._fields_container = Output(to_elemental_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + to_elemental_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/to_elemental_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/to_elemental_nodal_fc.py index 2967d29dcc4..b5eaa9b1e80 100644 --- a/src/ansys/dpf/core/operators/averaging/to_elemental_nodal_fc.py +++ b/src/ansys/dpf/core/operators/averaging/to_elemental_nodal_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class to_elemental_nodal_fc(Operator): r"""Transforms fields into Elemental Nodal fields using an averaging @@ -177,19 +183,21 @@ class InputsToElementalNodalFc(_Inputs): def __init__(self, op: Operator): super().__init__(to_elemental_nodal_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( to_elemental_nodal_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( to_elemental_nodal_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._mesh = Input(to_elemental_nodal_fc._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + to_elemental_nodal_fc._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -208,7 +216,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -227,7 +235,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. 
Returns @@ -260,13 +268,13 @@ class OutputsToElementalNodalFc(_Outputs): def __init__(self, op: Operator): super().__init__(to_elemental_nodal_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( to_elemental_nodal_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/to_nodal.py b/src/ansys/dpf/core/operators/averaging/to_nodal.py index 2787079ff2d..34e0e2ee31c 100644 --- a/src/ansys/dpf/core/operators/averaging/to_nodal.py +++ b/src/ansys/dpf/core/operators/averaging/to_nodal.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class to_nodal(Operator): r"""Transforms a field into a Nodal field using an averaging process. The @@ -195,17 +201,25 @@ class InputsToNodal(_Inputs): def __init__(self, op: Operator): super().__init__(to_nodal._spec().inputs, op) - self._field = Input(to_nodal._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + to_nodal._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._mesh_scoping = Input(to_nodal._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + to_nodal._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._merge_solid_shell = Input(to_nodal._spec().input_pin(26), 26, op, -1) + self._merge_solid_shell: Input[bool] = Input( + to_nodal._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._merge_solid_shell) - self._shell_layer = Input(to_nodal._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + to_nodal._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -226,7 +240,7 @@ def field(self) -> Input: return self._field @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -245,7 +259,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def merge_solid_shell(self) -> Input: + def merge_solid_shell(self) -> Input[bool]: r"""Allows to connect merge_solid_shell input to the operator. For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified. @@ -266,7 +280,7 @@ def merge_solid_shell(self) -> Input: return self._merge_solid_shell @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid). 
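
A sketch combining the two scalar pins above on to_nodal (mixed_field is a placeholder Field; the values follow the pin documentation, where shell_layer 0 selects the Top layer and must be set when merging solid and shell data):

from ansys.dpf.core import operators as ops

op = ops.averaging.to_nodal()
op.inputs.field.connect(mixed_field)       # pin typed Input[Field | FieldsContainer]
op.inputs.merge_solid_shell.connect(True)  # pin typed Input[bool]
op.inputs.shell_layer.connect(0)           # pin typed Input[int]; 0 = Top
nodal_field = op.outputs.field()           # evaluates the operator; the output is a Field
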
@@ -301,11 +315,11 @@ class OutputsToNodal(_Outputs): def __init__(self, op: Operator): super().__init__(to_nodal._spec().outputs, op) - self._field = Output(to_nodal._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(to_nodal._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/averaging/to_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/to_nodal_fc.py index 5a975f7166a..d251638a256 100644 --- a/src/ansys/dpf/core/operators/averaging/to_nodal_fc.py +++ b/src/ansys/dpf/core/operators/averaging/to_nodal_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class to_nodal_fc(Operator): r"""Transforms fields into Nodal fields using an averaging process. The @@ -209,19 +215,29 @@ class InputsToNodalFc(_Inputs): def __init__(self, op: Operator): super().__init__(to_nodal_fc._spec().inputs, op) - self._fields_container = Input(to_nodal_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + to_nodal_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(to_nodal_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion] = Input( + to_nodal_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._mesh_scoping = Input(to_nodal_fc._spec().input_pin(3), 3, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + to_nodal_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._merge_solid_shell = Input(to_nodal_fc._spec().input_pin(26), 26, op, -1) + self._merge_solid_shell: Input[bool] = Input( + to_nodal_fc._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._merge_solid_shell) - self._shell_layer = Input(to_nodal_fc._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + to_nodal_fc._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -240,7 +256,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -259,7 +275,7 @@ def mesh(self) -> Input: return self._mesh @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -278,7 +294,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def merge_solid_shell(self) -> Input: + def merge_solid_shell(self) -> Input[bool]: r"""Allows to connect merge_solid_shell input to the operator. For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified. 
@@ -299,7 +315,7 @@ def merge_solid_shell(self) -> Input: return self._merge_solid_shell @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid). @@ -334,11 +350,13 @@ class OutputsToNodalFc(_Outputs): def __init__(self, op: Operator): super().__init__(to_nodal_fc._spec().outputs, op) - self._fields_container = Output(to_nodal_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + to_nodal_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/build.py b/src/ansys/dpf/core/operators/build.py index c9d30487377..66e6f653cf6 100644 --- a/src/ansys/dpf/core/operators/build.py +++ b/src/ansys/dpf/core/operators/build.py @@ -1,9 +1,14 @@ """Build static source operators from DPF server.""" + import copy -import os from datetime import datetime +import importlib +import inspect +import os +import pkgutil from textwrap import wrap import time +from typing import Optional import black import chevron @@ -11,14 +16,13 @@ from ansys.dpf import core as dpf from ansys.dpf.core import common from ansys.dpf.core.dpf_operator import available_operator_names -from ansys.dpf.core.outputs import _make_printable_type from ansys.dpf.core.mapping_types import map_types_to_python from ansys.dpf.core.operators.translator import Markdown2RstTranslator - +from ansys.dpf.core.outputs import _make_printable_type # Operator internal names to call if first name is not available # Allows deprecating internal names associated to public Python operator modules -operator_aliases = { +OPERATOR_ALIASES = { "support_provider_cyclic": "mapdl::rst::support_provider_cyclic", "NMISC": "mapdl::nmisc", "SMISC": "mapdl::smisc", @@ -27,6 +31,62 @@ "MCF": "U", } +BUILT_IN_TYPES = ("int", "double", "string", "bool", "float", "str", "dict") + +TYPES_WITHOUT_PYTHON_IMPLEMENTATION = ( + "Materials", + "AnsDispatchHolder", + "Stream", + "AbstractFieldSupport", + "AnyCollection", + "CustomTypeFieldsContainer", + "MeshSelectionManager", + "Class Dataprocessing::Dpftypecollection", + "Struct Iansdispatch", + "PropertyFieldsContainer", + "Class Dataprocessing::Crstfilewrapper", + "Char", +) + + +def find_class_origin(class_name: str, package_name: str = "ansys.dpf.core") -> Optional[str]: + """Find the fully qualified import path where a class is originally defined.""" + try: + pkg = importlib.import_module(package_name) + except ModuleNotFoundError: + raise ValueError(f"Package '{package_name}' not found") + + # ensure we’re working with a real package, not a module + if not hasattr(pkg, "__path__"): + raise ValueError(f"'{package_name}' is not a package") + + # include the top-level package itself + modules_to_check = [package_name] + + # add all submodules of the package + for modinfo in pkgutil.walk_packages(pkg.__path__, prefix=f"{package_name}."): + modules_to_check.append(modinfo.name) + + # search through all modules + for mod_name in modules_to_check: + try: + mod = importlib.import_module(mod_name) + except Exception: + # skip broken or unimportable modules + continue + + cls = getattr(mod, 
class_name, None) + if cls is None or not inspect.isclass(cls): + continue + + # get the module where the class is actually defined + defining_module = inspect.getmodule(cls) + if defining_module and defining_module.__name__ == mod_name: + return f"{defining_module.__name__}" + + return None + + def build_docstring(specification_description): """Used to generate class docstrings.""" docstring = "" @@ -65,6 +125,7 @@ def build_pin_data(pins, output=False): pin_ids.sort() data = [] + for id in pin_ids: specification = pins[id] @@ -76,8 +137,13 @@ def build_pin_data(pins, output=False): type_names = update_type_names_for_ellipsis(type_names) docstring_types = map_types(type_names) parameter_types = " or ".join(docstring_types) - parameter_types = "\n".join( - wrap(parameter_types, subsequent_indent=" ", width=60) + parameter_types = "\n".join(wrap(parameter_types, subsequent_indent=" ", width=60)) + + type_list_for_annotation = " | ".join( + docstring_type + for docstring_type in docstring_types + if docstring_type + not in TYPES_WITHOUT_PYTHON_IMPLEMENTATION # Types without python implementations can't be typechecked ) pin_name = specification.name @@ -85,7 +151,6 @@ def build_pin_data(pins, output=False): pin_name = pin_name.replace(">", "_") main_type = docstring_types[0] if len(docstring_types) >= 1 else "" - built_in_types = ("int", "double", "string", "bool", "float", "str", "dict") # Case where output pin has multiple types. multiple_types = len(type_names) >= 2 @@ -99,16 +164,18 @@ def build_pin_data(pins, output=False): pin_data = { "id": id, "name": pin_name, - "pin_name": pin_name, # Base pin name, without numbers for when pin is ellipsis + "pin_name": pin_name, # Base pin name, without numbers for when pin is ellipsis "has_types": len(type_names) >= 1, "has_derived_class": len(derived_class_type_name) >= 1, "multiple_types": multiple_types, "printable_type_names": printable_type_names, "types": type_names, "derived_type_name": derived_class_type_name, + "docstring_types": docstring_types, + "type_list_for_annotation": type_list_for_annotation, "types_for_docstring": parameter_types, "main_type": main_type, - "built_in_main_type": main_type in built_in_types, + "built_in_main_type": main_type in BUILT_IN_TYPES, "optional": specification.optional, "document": document, "document_pin_docstring": document_pin_docstring, @@ -136,9 +203,13 @@ def build_pin_data(pins, output=False): def build_operator( - specification, operator_name, class_name, capital_class_name, category, specification_description + specification, + operator_name, + class_name, + capital_class_name, + category, + specification_description, ): - input_pins = [] if specification.inputs: input_pins = build_pin_data(specification.inputs) @@ -154,13 +225,37 @@ def build_operator( date_and_time = datetime.now().strftime("%m/%d/%Y, %H:%M:%S") + annotation_import_types = set() + for input_pin in input_pins: + annotation_import_types.update(input_pin["docstring_types"]) + for output_pin in output_pins: + # Output pins with multiple types can't be annotated with current operators design + if output_pin["multiple_types"]: + continue + annotation_import_types.update(output_pin["docstring_types"]) + annotation_import_list = [] + for annotation_type in annotation_import_types: + if annotation_type in BUILT_IN_TYPES + TYPES_WITHOUT_PYTHON_IMPLEMENTATION: + continue + definition_location = find_class_origin(annotation_type) + annotation_import_list.append( + { + "class_name": annotation_type, + "definition_location": 
definition_location, + } + ) + annotation_import_list.sort(key= lambda x: x["class_name"].split("ansys.dpf.core.")[-1]) + non_empty_annotation_import_list = bool(annotation_import_list) + data = { "operator_name": operator_name, "class_name": class_name, - "class_name_underlining": len(class_name)*"=", + "class_name_underlining": len(class_name) * "=", "capital_class_name": capital_class_name, "docstring": docstring, "specification_description": specification_description, + "annotation_import_list": annotation_import_list, + "non_empty_annotation_import_list": non_empty_annotation_import_list, "input_pins": input_pins, "output_pins": output_pins, "outputs": len(output_pins) >= 1, @@ -169,8 +264,8 @@ def build_operator( "date_and_time": date_and_time, "has_input_aliases": has_input_aliases, "has_output_aliases": has_output_aliases, - "has_internal_name_alias": operator_name in operator_aliases.keys(), - "internal_name_alias": operator_aliases.get(operator_name), + "has_internal_name_alias": operator_name in OPERATOR_ALIASES.keys(), + "internal_name_alias": OPERATOR_ALIASES.get(operator_name), } this_path = os.path.dirname(os.path.abspath(__file__)) @@ -201,9 +296,12 @@ def build_operators(): # until they are fully deprecated hidden_to_expose = [ # Use internal names "change_fc", - "dot", "dot_tensor", - "scale_by_field", "scale_by_field_fc", - "invert", "invert_fc", + "dot", + "dot_tensor", + "scale_by_field", + "scale_by_field_fc", + "invert", + "invert_fc", ] categories = set() @@ -215,9 +313,10 @@ def build_operators(): print(f"{done} operators done...") specification = dpf.Operator.operator_specification(operator_name) - if (specification.properties["exposure"] in ["hidden", "private"] - and - operator_name not in hidden_to_expose): + if ( + specification.properties["exposure"] in ["hidden", "private"] + and operator_name not in hidden_to_expose + ): hidden += 1 continue @@ -248,7 +347,7 @@ def build_operators(): # Write to operator file operator_file = os.path.join(category_path, scripting_name + ".py") - with open(operator_file, "w", encoding='utf-8', newline="\u000A") as f: + with open(operator_file, "w", encoding="utf-8", newline="\u000a") as f: operator_str = scripting_name try: operator_str = build_operator( @@ -276,18 +375,23 @@ def build_operators(): # Create __init__.py files print(f"Generating __init__.py files...") - with open(os.path.join(this_path, "__init__.py"), "w", encoding="utf-8", newline="\u000A") as main_init: + with open( + os.path.join(this_path, "__init__.py"), "w", encoding="utf-8", newline="\u000a" + ) as main_init: for category in sorted(categories): # Add category to main init file imports main_init.write(f"from . 
import {category}\n") # Create category init file category_operators = os.listdir(os.path.join(this_path, category.split(".")[0])) - with open(os.path.join(this_path, category, "__init__.py"), "w", encoding="utf-8", newline="\u000A") as category_init: + with open( + os.path.join(this_path, category, "__init__.py"), + "w", + encoding="utf-8", + newline="\u000a", + ) as category_init: for category_operator in sorted(category_operators): operator_name = category_operator.split(".")[0] - category_init.write( - f"from .{operator_name} import {operator_name}\n" - ) + category_init.write(f"from .{operator_name} import {operator_name}\n") if succeeded == len(available_operators) - hidden: print("Success") diff --git a/src/ansys/dpf/core/operators/compression/apply_svd.py b/src/ansys/dpf/core/operators/compression/apply_svd.py index 9cca7bbfaae..d4e53c89252 100644 --- a/src/ansys/dpf/core/operators/compression/apply_svd.py +++ b/src/ansys/dpf/core/operators/compression/apply_svd.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class apply_svd(Operator): r"""Computes the coefficients (=U*Sigma) and VT components from SVD. @@ -214,19 +218,21 @@ class InputsApplySvd(_Inputs): def __init__(self, op: Operator): super().__init__(apply_svd._spec().inputs, op) - self._field_contaner_to_compress = Input( + self._field_contaner_to_compress: Input[FieldsContainer] = Input( apply_svd._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._field_contaner_to_compress) - self._scalar_int = Input(apply_svd._spec().input_pin(1), 1, op, -1) + self._scalar_int: Input[int] = Input(apply_svd._spec().input_pin(1), 1, op, -1) self._inputs.append(self._scalar_int) - self._scalar_double = Input(apply_svd._spec().input_pin(2), 2, op, -1) + self._scalar_double: Input[float] = Input( + apply_svd._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._scalar_double) - self._boolean = Input(apply_svd._spec().input_pin(3), 3, op, -1) + self._boolean: Input[bool] = Input(apply_svd._spec().input_pin(3), 3, op, -1) self._inputs.append(self._boolean) @property - def field_contaner_to_compress(self) -> Input: + def field_contaner_to_compress(self) -> Input[FieldsContainer]: r"""Allows to connect field_contaner_to_compress input to the operator. Fields container with data to be compressed @@ -247,7 +253,7 @@ def field_contaner_to_compress(self) -> Input: return self._field_contaner_to_compress @property - def scalar_int(self) -> Input: + def scalar_int(self) -> Input[int]: r"""Allows to connect scalar_int input to the operator. Number of vectors (r) to keep for the future reconstruction of the matrix A, ex.A[m,n] = coef[m,r] * VT[r,n], where coef = U * Sigma @@ -268,7 +274,7 @@ def scalar_int(self) -> Input: return self._scalar_int @property - def scalar_double(self) -> Input: + def scalar_double(self) -> Input[float]: r"""Allows to connect scalar_double input to the operator. Threshold (precision) as a double (Default : 1e-7). If both pin1 and pin2 are provided, choose the min r-vectors @@ -289,7 +295,7 @@ def scalar_double(self) -> Input: return self._scalar_double @property - def boolean(self) -> Input: + def boolean(self) -> Input[bool]: r"""Allows to connect boolean input to the operator.
Apply SVD on the initial data (Default : false), otherwise use reduced data (square matrix with the smallest dimensions). @@ -326,9 +332,13 @@ class OutputsApplySvd(_Outputs): def __init__(self, op: Operator): super().__init__(apply_svd._spec().outputs, op) - self._us_svd = Output(apply_svd._spec().output_pin(0), 0, op) + self._us_svd: Output[FieldsContainer] = Output( + apply_svd._spec().output_pin(0), 0, op + ) self._outputs.append(self._us_svd) - self._vt_svd = Output(apply_svd._spec().output_pin(1), 1, op) + self._vt_svd: Output[FieldsContainer] = Output( + apply_svd._spec().output_pin(1), 1, op + ) self._outputs.append(self._vt_svd) self.sigma_as_field = Output( _modify_output_spec_with_one_type(apply_svd._spec().output_pin(2), "field"), @@ -346,7 +356,7 @@ def __init__(self, op: Operator): self._outputs.append(self.sigma_as_fields_container) @property - def us_svd(self) -> Output: + def us_svd(self) -> Output[FieldsContainer]: r"""Allows to get us_svd output of the operator The output entity is a fields container (time dependent); it contains the product of two matrices, U and S, where A=U.S.Vt @@ -366,7 +376,7 @@ def us_svd(self) -> Output: return self._us_svd @property - def vt_svd(self) -> Output: + def vt_svd(self) -> Output[FieldsContainer]: r"""Allows to get vt_svd output of the operator The output entity is a field container (space dependent), containing the Vt, where A=U.S.Vt diff --git a/src/ansys/dpf/core/operators/compression/apply_zfp.py b/src/ansys/dpf/core/operators/compression/apply_zfp.py index c1424e4e44c..93d76a72282 100644 --- a/src/ansys/dpf/core/operators/compression/apply_zfp.py +++ b/src/ansys/dpf/core/operators/compression/apply_zfp.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class apply_zfp(Operator): r"""Compressing input data using one of zfp compression algorithm modes.
@@ -261,23 +266,31 @@ class InputsApplyZfp(_Inputs): def __init__(self, op: Operator): super().__init__(apply_zfp._spec().inputs, op) - self._dataIn = Input(apply_zfp._spec().input_pin(0), 0, op, -1) + self._dataIn: Input[Field | FieldsContainer] = Input( + apply_zfp._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._dataIn) - self._mode = Input(apply_zfp._spec().input_pin(1), 1, op, -1) + self._mode: Input[str] = Input(apply_zfp._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mode) - self._mode_parameter = Input(apply_zfp._spec().input_pin(2), 2, op, -1) + self._mode_parameter: Input[int | float] = Input( + apply_zfp._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._mode_parameter) - self._dim = Input(apply_zfp._spec().input_pin(3), 3, op, -1) + self._dim: Input[int] = Input(apply_zfp._spec().input_pin(3), 3, op, -1) self._inputs.append(self._dim) - self._order = Input(apply_zfp._spec().input_pin(4), 4, op, -1) + self._order: Input[int] = Input(apply_zfp._spec().input_pin(4), 4, op, -1) self._inputs.append(self._order) - self._double_absthreshold = Input(apply_zfp._spec().input_pin(5), 5, op, -1) + self._double_absthreshold: Input[float] = Input( + apply_zfp._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._double_absthreshold) - self._double_relthreshold = Input(apply_zfp._spec().input_pin(6), 6, op, -1) + self._double_relthreshold: Input[float] = Input( + apply_zfp._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._double_relthreshold) @property - def dataIn(self) -> Input: + def dataIn(self) -> Input[Field | FieldsContainer]: r"""Allows to connect dataIn input to the operator. field or fields container to be compressed @@ -298,7 +311,7 @@ def dataIn(self) -> Input: return self._dataIn @property - def mode(self) -> Input: + def mode(self) -> Input[str]: r"""Allows to connect mode input to the operator. zfp mode: fixed-rate ('r'), fixed-precision ('p'), fixed-accuracy ('a') @@ -319,7 +332,7 @@ def mode(self) -> Input: return self._mode @property - def mode_parameter(self) -> Input: + def mode_parameter(self) -> Input[int | float]: r"""Allows to connect mode_parameter input to the operator. mode-corresponding parameter: rate (double) / precision (int) / accuracy (double) @@ -340,7 +353,7 @@ def mode_parameter(self) -> Input: return self._mode_parameter @property - def dim(self) -> Input: + def dim(self) -> Input[int]: r"""Allows to connect dim input to the operator. dimension (1D/2D/3D) for data organization before the compression (int; default: 2) @@ -361,7 +374,7 @@ def dim(self) -> Input: return self._dim @property - def order(self) -> Input: + def order(self) -> Input[int]: r"""Allows to connect order input to the operator. xyz dimensions order, where x (row) corresponds to number of elementary data, y (col) - number of time steps, z - number of components (applicable only for 3d data) : 0=xyz, 1=yxz (int; default: 0) @@ -382,7 +395,7 @@ def order(self) -> Input: return self._order @property - def double_absthreshold(self) -> Input: + def double_absthreshold(self) -> Input[float]: r"""Allows to connect double_absthreshold input to the operator. Double positive small value. All the values smaller than max(small value, max(vi) * relative threshold) are considered as zero values, (default value: 1.0e-18). 
@@ -403,7 +416,7 @@ def double_absthreshold(self) -> Input: return self._double_absthreshold @property - def double_relthreshold(self) -> Input: + def double_relthreshold(self) -> Input[float]: r"""Allows to connect double_relthreshold input to the operator. Double relative threshold. Values smaller than (v1 - v2) < max(small value, v1 * relativeTol) are considered identical (default value: 1.0e-10). @@ -440,15 +453,19 @@ class OutputsApplyZfp(_Outputs): def __init__(self, op: Operator): super().__init__(apply_zfp._spec().outputs, op) - self._compress_speed = Output(apply_zfp._spec().output_pin(0), 0, op) + self._compress_speed: Output[float] = Output( + apply_zfp._spec().output_pin(0), 0, op + ) self._outputs.append(self._compress_speed) - self._compress_ratio = Output(apply_zfp._spec().output_pin(1), 1, op) + self._compress_ratio: Output[float] = Output( + apply_zfp._spec().output_pin(1), 1, op + ) self._outputs.append(self._compress_ratio) - self._dataOut = Output(apply_zfp._spec().output_pin(2), 2, op) + self._dataOut: Output = Output(apply_zfp._spec().output_pin(2), 2, op) self._outputs.append(self._dataOut) @property - def compress_speed(self) -> Output: + def compress_speed(self) -> Output[float]: r"""Allows to get compress_speed output of the operator the output entity is a double, containing compression speed of the input data: for ElementalNodal location - [elements/sec], for Nodal location - [nodes/sec] @@ -468,7 +485,7 @@ def compress_speed(self) -> Output: return self._compress_speed @property - def compress_ratio(self) -> Output: + def compress_ratio(self) -> Output[float]: r"""Allows to get compress_ratio output of the operator the output entity is a double, containing compression rate = initial/compressed diff --git a/src/ansys/dpf/core/operators/compression/kmeans_clustering.py b/src/ansys/dpf/core/operators/compression/kmeans_clustering.py index 796e5ed687e..40fdf33f5eb 100644 --- a/src/ansys/dpf/core/operators/compression/kmeans_clustering.py +++ b/src/ansys/dpf/core/operators/compression/kmeans_clustering.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scopings_container import ScopingsContainer + class kmeans_clustering(Operator): r"""Apply kMeans clustering to group data depending on the data’s @@ -197,21 +202,25 @@ class InputsKmeansClustering(_Inputs): def __init__(self, op: Operator): super().__init__(kmeans_clustering._spec().inputs, op) - self._clusters_number = Input(kmeans_clustering._spec().input_pin(0), 0, op, -1) + self._clusters_number: Input[int] = Input( + kmeans_clustering._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._clusters_number) - self._formula = Input(kmeans_clustering._spec().input_pin(1), 1, op, -1) + self._formula: Input[str] = Input( + kmeans_clustering._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._formula) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( kmeans_clustering._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._component_number = Input( + self._component_number: Input[int] = Input( kmeans_clustering._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._component_number) @property - def 
clusters_number(self) -> Input: + def clusters_number(self) -> Input[int]: r"""Allows to connect clusters_number input to the operator. number of the clusters (default is 3) @@ -232,7 +241,7 @@ def clusters_number(self) -> Input: return self._clusters_number @property - def formula(self) -> Input: + def formula(self) -> Input[str]: r"""Allows to connect formula input to the operator. formula ('dist'/'dotprod'), default is 'dist' @@ -253,7 +262,7 @@ def formula(self) -> Input: return self._formula @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. an input fields container containing the data which will be used for the clustering @@ -274,7 +283,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def component_number(self) -> Input: + def component_number(self) -> Input[int]: r"""Allows to connect component_number input to the operator. component number as an int (default is 0), ex '0' for X-displacement, '1' for Y-displacement,... @@ -309,11 +318,13 @@ class OutputsKmeansClustering(_Outputs): def __init__(self, op: Operator): super().__init__(kmeans_clustering._spec().outputs, op) - self._scoping_clusters = Output(kmeans_clustering._spec().output_pin(0), 0, op) + self._scoping_clusters: Output[ScopingsContainer] = Output( + kmeans_clustering._spec().output_pin(0), 0, op + ) self._outputs.append(self._scoping_clusters) @property - def scoping_clusters(self) -> Output: + def scoping_clusters(self) -> Output[ScopingsContainer]: r"""Allows to get scoping_clusters output of the operator Scopings container with the space scoping (entities' ids) corresponding to each of k-clusters diff --git a/src/ansys/dpf/core/operators/compression/quantization.py b/src/ansys/dpf/core/operators/compression/quantization.py index c93ba51274b..6d132532316 100644 --- a/src/ansys/dpf/core/operators/compression/quantization.py +++ b/src/ansys/dpf/core/operators/compression/quantization.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class quantization(Operator): r"""Scales a field to a given precision threshold, then rounds all the @@ -169,13 +173,17 @@ class InputsQuantization(_Inputs): def __init__(self, op: Operator): super().__init__(quantization._spec().inputs, op) - self._input_field = Input(quantization._spec().input_pin(0), 0, op, -1) + self._input_field: Input[Field] = Input( + quantization._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._input_field) - self._threshold = Input(quantization._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + quantization._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) @property - def input_field(self) -> Input: + def input_field(self) -> Input[Field]: r"""Allows to connect input_field input to the operator. Field to quantize. @@ -196,7 +204,7 @@ def input_field(self) -> Input: return self._input_field @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. Precision threshold desired.
@@ -236,11 +244,13 @@ class OutputsQuantization(_Outputs): def __init__(self, op: Operator): super().__init__(quantization._spec().outputs, op) - self._output_field = Output(quantization._spec().output_pin(0), 0, op) + self._output_field: Output[Field] = Output( + quantization._spec().output_pin(0), 0, op + ) self._outputs.append(self._output_field) @property - def output_field(self) -> Output: + def output_field(self) -> Output[Field]: r"""Allows to get output_field output of the operator Scaled and rounded field diff --git a/src/ansys/dpf/core/operators/compression/quantization_fc.py b/src/ansys/dpf/core/operators/compression/quantization_fc.py index 2c9525c884d..ed327a539c1 100644 --- a/src/ansys/dpf/core/operators/compression/quantization_fc.py +++ b/src/ansys/dpf/core/operators/compression/quantization_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class quantization_fc(Operator): r"""Scales all the fields of a fields container to a given precision @@ -167,13 +172,17 @@ class InputsQuantizationFc(_Inputs): def __init__(self, op: Operator): super().__init__(quantization_fc._spec().inputs, op) - self._input_fc = Input(quantization_fc._spec().input_pin(0), 0, op, -1) + self._input_fc: Input[FieldsContainer] = Input( + quantization_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._input_fc) - self._threshold = Input(quantization_fc._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field | FieldsContainer] = Input( + quantization_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) @property - def input_fc(self) -> Input: + def input_fc(self) -> Input[FieldsContainer]: r"""Allows to connect input_fc input to the operator. Fields container to be quantized. @@ -194,7 +203,7 @@ def input_fc(self) -> Input: return self._input_fc @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field | FieldsContainer]: r"""Allows to connect threshold input to the operator. Precision threshold desired. @@ -233,11 +242,13 @@ class OutputsQuantizationFc(_Outputs): def __init__(self, op: Operator): super().__init__(quantization_fc._spec().outputs, op) - self._output_fc = Output(quantization_fc._spec().output_pin(0), 0, op) + self._output_fc: Output[FieldsContainer] = Output( + quantization_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._output_fc) @property - def output_fc(self) -> Output: + def output_fc(self) -> Output[FieldsContainer]: r"""Allows to get output_fc output of the operator Quantized fields container. 
diff --git a/src/ansys/dpf/core/operators/compression/zfp_decompress.py b/src/ansys/dpf/core/operators/compression/zfp_decompress.py index d742e38506d..5e4379629c1 100644 --- a/src/ansys/dpf/core/operators/compression/zfp_decompress.py +++ b/src/ansys/dpf/core/operators/compression/zfp_decompress.py @@ -154,7 +154,7 @@ class InputsZfpDecompress(_Inputs): def __init__(self, op: Operator): super().__init__(zfp_decompress._spec().inputs, op) - self._dataIn = Input(zfp_decompress._spec().input_pin(0), 0, op, -1) + self._dataIn: Input = Input(zfp_decompress._spec().input_pin(0), 0, op, -1) self._inputs.append(self._dataIn) @property @@ -210,11 +210,13 @@ def __init__(self, op: Operator): op, ) self._outputs.append(self.dataOut_as_fields_container) - self._decompress_speed = Output(zfp_decompress._spec().output_pin(1), 1, op) + self._decompress_speed: Output[float] = Output( + zfp_decompress._spec().output_pin(1), 1, op + ) self._outputs.append(self._decompress_speed) @property - def decompress_speed(self) -> Output: + def decompress_speed(self) -> Output[float]: r"""Allows to get decompress_speed output of the operator the output entity is a double, containing decompression speed (mb/sec) diff --git a/src/ansys/dpf/core/operators/filter/abc_weightings.py b/src/ansys/dpf/core/operators/filter/abc_weightings.py index 4deef12117b..bbf911580df 100644 --- a/src/ansys/dpf/core/operators/filter/abc_weightings.py +++ b/src/ansys/dpf/core/operators/filter/abc_weightings.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class abc_weightings(Operator): r"""Computes ABC-weightings for the amplitude spectrum in dB units. @@ -179,17 +183,21 @@ class InputsAbcWeightings(_Inputs): def __init__(self, op: Operator): super().__init__(abc_weightings._spec().inputs, op) - self._fields_container = Input(abc_weightings._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + abc_weightings._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._weighting_type = Input(abc_weightings._spec().input_pin(1), 1, op, -1) + self._weighting_type: Input[int] = Input( + abc_weightings._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._weighting_type) - self._shape_by_tf_scoping = Input( + self._shape_by_tf_scoping: Input[bool] = Input( abc_weightings._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._shape_by_tf_scoping) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. data to be weighted in dB units. @@ -210,7 +218,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def weighting_type(self) -> Input: + def weighting_type(self) -> Input[int]: r"""Allows to connect weighting_type input to the operator. if this pin is set to 0, the A-weighting is computed, 1 the B-weighting is computed and 2 the C-weighting is computed. @@ -231,7 +239,7 @@ def weighting_type(self) -> Input: return self._weighting_type @property - def shape_by_tf_scoping(self) -> Input: + def shape_by_tf_scoping(self) -> Input[bool]: r"""Allows to connect shape_by_tf_scoping input to the operator.
if this pin is set to true, each field of the input fields container is defined by time freq scoping and not by ids. Default is false @@ -266,11 +274,13 @@ class OutputsAbcWeightings(_Outputs): def __init__(self, op: Operator): super().__init__(abc_weightings._spec().outputs, op) - self._weightings = Output(abc_weightings._spec().output_pin(0), 0, op) + self._weightings: Output[FieldsContainer] = Output( + abc_weightings._spec().output_pin(0), 0, op + ) self._outputs.append(self._weightings) @property - def weightings(self) -> Output: + def weightings(self) -> Output[FieldsContainer]: r"""Allows to get weightings output of the operator weighted data in dB units. diff --git a/src/ansys/dpf/core/operators/filter/field_band_pass.py b/src/ansys/dpf/core/operators/filter/field_band_pass.py index bf58c4abfe0..494c60b6286 100644 --- a/src/ansys/dpf/core/operators/filter/field_band_pass.py +++ b/src/ansys/dpf/core/operators/filter/field_band_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class field_band_pass(Operator): r"""The band pass filter returns all the values above (but not equal to) the @@ -182,15 +187,21 @@ class InputsFieldBandPass(_Inputs): def __init__(self, op: Operator): super().__init__(field_band_pass._spec().inputs, op) - self._field = Input(field_band_pass._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + field_band_pass._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._min_threshold = Input(field_band_pass._spec().input_pin(1), 1, op, -1) + self._min_threshold: Input[float | Field] = Input( + field_band_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._min_threshold) - self._max_threshold = Input(field_band_pass._spec().input_pin(2), 2, op, -1) + self._max_threshold: Input[float | Field] = Input( + field_band_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._max_threshold) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -211,7 +222,7 @@ def field(self) -> Input: return self._field @property - def min_threshold(self) -> Input: + def min_threshold(self) -> Input[float | Field]: r"""Allows to connect min_threshold input to the operator. A minimum threshold scalar or a field containing one value is expected. @@ -232,7 +243,7 @@ def min_threshold(self) -> Input: return self._min_threshold @property - def max_threshold(self) -> Input: + def max_threshold(self) -> Input[float | Field]: r"""Allows to connect max_threshold input to the operator. A maximum threshold scalar or a field containing one value is expected. 
@@ -267,11 +278,13 @@ class OutputsFieldBandPass(_Outputs): def __init__(self, op: Operator): super().__init__(field_band_pass._spec().outputs, op) - self._field = Output(field_band_pass._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + field_band_pass._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/field_band_pass_fc.py b/src/ansys/dpf/core/operators/filter/field_band_pass_fc.py index e2a573377ce..1272787ce1f 100644 --- a/src/ansys/dpf/core/operators/filter/field_band_pass_fc.py +++ b/src/ansys/dpf/core/operators/filter/field_band_pass_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class field_band_pass_fc(Operator): r"""The band pass filter returns all the values above (but not equal to) the @@ -182,17 +187,21 @@ class InputsFieldBandPassFc(_Inputs): def __init__(self, op: Operator): super().__init__(field_band_pass_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( field_band_pass_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._min_threshold = Input(field_band_pass_fc._spec().input_pin(1), 1, op, -1) + self._min_threshold: Input[float | Field] = Input( + field_band_pass_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._min_threshold) - self._max_threshold = Input(field_band_pass_fc._spec().input_pin(2), 2, op, -1) + self._max_threshold: Input[float | Field] = Input( + field_band_pass_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._max_threshold) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -213,7 +222,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def min_threshold(self) -> Input: + def min_threshold(self) -> Input[float | Field]: r"""Allows to connect min_threshold input to the operator. A minimum threshold scalar or a field containing one value is expected. @@ -234,7 +243,7 @@ def min_threshold(self) -> Input: return self._min_threshold @property - def max_threshold(self) -> Input: + def max_threshold(self) -> Input[float | Field]: r"""Allows to connect max_threshold input to the operator. A maximum threshold scalar or a field containing one value is expected. 
@@ -269,11 +278,13 @@ class OutputsFieldBandPassFc(_Outputs): def __init__(self, op: Operator): super().__init__(field_band_pass_fc._spec().outputs, op) - self._fields_container = Output(field_band_pass_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + field_band_pass_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/field_high_pass.py b/src/ansys/dpf/core/operators/filter/field_high_pass.py index 3ba49521557..78ea61a6e40 100644 --- a/src/ansys/dpf/core/operators/filter/field_high_pass.py +++ b/src/ansys/dpf/core/operators/filter/field_high_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class field_high_pass(Operator): r"""The high pass filter returns all the values above (but not equal to) the @@ -173,15 +178,19 @@ class InputsFieldHighPass(_Inputs): def __init__(self, op: Operator): super().__init__(field_high_pass._spec().inputs, op) - self._field = Input(field_high_pass._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + field_high_pass._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._threshold = Input(field_high_pass._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + field_high_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) - self._both = Input(field_high_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input(field_high_pass._spec().input_pin(2), 2, op, -1) self._inputs.append(self._both) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -202,7 +211,7 @@ def field(self) -> Input: return self._field @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. A threshold scalar or a field containing one value is expected. @@ -223,7 +232,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -258,11 +267,13 @@ class OutputsFieldHighPass(_Outputs): def __init__(self, op: Operator): super().__init__(field_high_pass._spec().outputs, op) - self._field = Output(field_high_pass._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + field_high_pass._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/field_high_pass_fc.py b/src/ansys/dpf/core/operators/filter/field_high_pass_fc.py index f6036337bf4..900b81f3aa6 100644 --- a/src/ansys/dpf/core/operators/filter/field_high_pass_fc.py +++ b/src/ansys/dpf/core/operators/filter/field_high_pass_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class field_high_pass_fc(Operator): r"""The high pass filter returns all the values above (but not equal to) the @@ -175,17 +180,21 @@ class InputsFieldHighPassFc(_Inputs): def __init__(self, op: Operator): super().__init__(field_high_pass_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( field_high_pass_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._threshold = Input(field_high_pass_fc._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + field_high_pass_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) - self._both = Input(field_high_pass_fc._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + field_high_pass_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -206,7 +215,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. A threshold scalar or a field containing one value is expected. @@ -227,7 +236,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -262,11 +271,13 @@ class OutputsFieldHighPassFc(_Outputs): def __init__(self, op: Operator): super().__init__(field_high_pass_fc._spec().outputs, op) - self._fields_container = Output(field_high_pass_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + field_high_pass_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/field_low_pass.py b/src/ansys/dpf/core/operators/filter/field_low_pass.py index 6e50ae45d02..0f7fca9de65 100644 --- a/src/ansys/dpf/core/operators/filter/field_low_pass.py +++ b/src/ansys/dpf/core/operators/filter/field_low_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class field_low_pass(Operator): r"""The low pass filter returns all the values below (but not equal to) the @@ -173,15 +178,19 @@ class InputsFieldLowPass(_Inputs): def __init__(self, op: Operator): super().__init__(field_low_pass._spec().inputs, op) - self._field = Input(field_low_pass._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + field_low_pass._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._threshold = Input(field_low_pass._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + field_low_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) - self._both = Input(field_low_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input(field_low_pass._spec().input_pin(2), 2, op, -1) self._inputs.append(self._both) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -202,7 +211,7 @@ def field(self) -> Input: return self._field @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. a threshold scalar or a field containing one value is expected @@ -223,7 +232,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -258,11 +267,11 @@ class OutputsFieldLowPass(_Outputs): def __init__(self, op: Operator): super().__init__(field_low_pass._spec().outputs, op) - self._field = Output(field_low_pass._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(field_low_pass._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/field_low_pass_fc.py b/src/ansys/dpf/core/operators/filter/field_low_pass_fc.py index 96e75cf7a22..664df52f438 100644 --- a/src/ansys/dpf/core/operators/filter/field_low_pass_fc.py +++ b/src/ansys/dpf/core/operators/filter/field_low_pass_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class field_low_pass_fc(Operator): r"""The low pass filter returns all the values below (but not equal to) the @@ -175,17 +180,21 @@ class InputsFieldLowPassFc(_Inputs): def __init__(self, op: Operator): super().__init__(field_low_pass_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( field_low_pass_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._threshold = Input(field_low_pass_fc._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + field_low_pass_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) - self._both = Input(field_low_pass_fc._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + field_low_pass_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -206,7 +215,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. a threshold scalar or a field containing one value is expected @@ -227,7 +236,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -262,11 +271,13 @@ class OutputsFieldLowPassFc(_Outputs): def __init__(self, op: Operator): super().__init__(field_low_pass_fc._spec().outputs, op) - self._fields_container = Output(field_low_pass_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + field_low_pass_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/field_signed_high_pass.py b/src/ansys/dpf/core/operators/filter/field_signed_high_pass.py index 079c684b5f0..36ecd323c2f 100644 --- a/src/ansys/dpf/core/operators/filter/field_signed_high_pass.py +++ b/src/ansys/dpf/core/operators/filter/field_signed_high_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class field_signed_high_pass(Operator): r"""The high pass filter returns all the values above, or equal, in absolute @@ -177,15 +182,21 @@ class InputsFieldSignedHighPass(_Inputs): def __init__(self, op: Operator): super().__init__(field_signed_high_pass._spec().inputs, op) - self._field = Input(field_signed_high_pass._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + field_signed_high_pass._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._threshold = Input(field_signed_high_pass._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + field_signed_high_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) - self._both = Input(field_signed_high_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + field_signed_high_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -206,7 +217,7 @@ def field(self) -> Input: return self._field @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. A threshold scalar or a field containing one value is expected. @@ -227,7 +238,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -262,11 +273,13 @@ class OutputsFieldSignedHighPass(_Outputs): def __init__(self, op: Operator): super().__init__(field_signed_high_pass._spec().outputs, op) - self._field = Output(field_signed_high_pass._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + field_signed_high_pass._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/field_signed_high_pass_fc.py b/src/ansys/dpf/core/operators/filter/field_signed_high_pass_fc.py index e1ecfea3139..e797a607196 100644 --- a/src/ansys/dpf/core/operators/filter/field_signed_high_pass_fc.py +++ b/src/ansys/dpf/core/operators/filter/field_signed_high_pass_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class field_signed_high_pass_fc(Operator): r"""The high pass filter returns all the values above, or equal, in absolute @@ -179,19 +184,21 @@ class InputsFieldSignedHighPassFc(_Inputs): def __init__(self, op: Operator): super().__init__(field_signed_high_pass_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( field_signed_high_pass_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._threshold = Input( + self._threshold: Input[float | Field] = Input( field_signed_high_pass_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._threshold) - self._both = Input(field_signed_high_pass_fc._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + field_signed_high_pass_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -212,7 +219,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. A threshold scalar or a field containing one value is expected. @@ -233,7 +240,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -268,13 +275,13 @@ class OutputsFieldSignedHighPassFc(_Outputs): def __init__(self, op: Operator): super().__init__(field_signed_high_pass_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( field_signed_high_pass_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/filtering_max_over_time.py b/src/ansys/dpf/core/operators/filter/filtering_max_over_time.py index e8355c7b242..554ccab130b 100644 --- a/src/ansys/dpf/core/operators/filter/filtering_max_over_time.py +++ b/src/ansys/dpf/core/operators/filter/filtering_max_over_time.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.workflow import Workflow + class filtering_max_over_time(Operator): r"""Creates a filtering workflow that will filter results based on a @@ -196,23 +200,25 @@ class InputsFilteringMaxOverTime(_Inputs): def __init__(self, op: Operator): super().__init__(filtering_max_over_time._spec().inputs, op) - self._invariant_fc_operator = Input( + self._invariant_fc_operator: Input[str] = Input( filtering_max_over_time._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._invariant_fc_operator) - self._output_pin = Input( + self._output_pin: Input[int] = Input( filtering_max_over_time._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._output_pin) - self._list_of_results = Input( + self._list_of_results: Input[str] = Input( filtering_max_over_time._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._list_of_results) - self._threshold = Input(filtering_max_over_time._spec().input_pin(3), 3, op, -1) + self._threshold: Input[float] = Input( + filtering_max_over_time._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._threshold) @property - def invariant_fc_operator(self) -> Input: + def invariant_fc_operator(self) -> Input[str]: r"""Allows to connect invariant_fc_operator input to the operator. Name of the invariant operator to be used to calculate filter (available: eqv_fc, invariants_deriv_fc, invariants_fc). @@ -233,7 +239,7 @@ def invariant_fc_operator(self) -> Input: return self._invariant_fc_operator @property - def output_pin(self) -> Input: + def output_pin(self) -> Input[int]: r"""Allows to connect output_pin input to the operator. Output pin of the invariant operator. Default = 0. @@ -254,7 +260,7 @@ def output_pin(self) -> Input: return self._output_pin @property - def list_of_results(self) -> Input: + def list_of_results(self) -> Input[str]: r"""Allows to connect list_of_results input to the operator. If no result is given, filter will be applied on Stresses and Strains @@ -275,7 +281,7 @@ def list_of_results(self) -> Input: return self._list_of_results @property - def threshold(self) -> Input: + def threshold(self) -> Input[float]: r"""Allows to connect threshold input to the operator. Threshold from which the operator will filter. 
@@ -310,11 +316,13 @@ class OutputsFilteringMaxOverTime(_Outputs): def __init__(self, op: Operator): super().__init__(filtering_max_over_time._spec().outputs, op) - self._workflow = Output(filtering_max_over_time._spec().output_pin(0), 0, op) + self._workflow: Output[Workflow] = Output( + filtering_max_over_time._spec().output_pin(0), 0, op + ) self._outputs.append(self._workflow) @property - def workflow(self) -> Output: + def workflow(self) -> Output[Workflow]: r"""Allows to get workflow output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/scoping_band_pass.py b/src/ansys/dpf/core/operators/filter/scoping_band_pass.py index 5e5c4f892e3..b4c76d66765 100644 --- a/src/ansys/dpf/core/operators/filter/scoping_band_pass.py +++ b/src/ansys/dpf/core/operators/filter/scoping_band_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class scoping_band_pass(Operator): r"""The band pass filter returns all the values above (but not equal to) the @@ -182,15 +188,21 @@ class InputsScopingBandPass(_Inputs): def __init__(self, op: Operator): super().__init__(scoping_band_pass._spec().inputs, op) - self._field = Input(scoping_band_pass._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + scoping_band_pass._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._min_threshold = Input(scoping_band_pass._spec().input_pin(1), 1, op, -1) + self._min_threshold: Input[float | Field] = Input( + scoping_band_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._min_threshold) - self._max_threshold = Input(scoping_band_pass._spec().input_pin(2), 2, op, -1) + self._max_threshold: Input[float | Field] = Input( + scoping_band_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._max_threshold) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -211,7 +223,7 @@ def field(self) -> Input: return self._field @property - def min_threshold(self) -> Input: + def min_threshold(self) -> Input[float | Field]: r"""Allows to connect min_threshold input to the operator. A minimum threshold scalar or a field containing one value is expected. @@ -232,7 +244,7 @@ def min_threshold(self) -> Input: return self._min_threshold @property - def max_threshold(self) -> Input: + def max_threshold(self) -> Input[float | Field]: r"""Allows to connect max_threshold input to the operator. A maximum threshold scalar or a field containing one value is expected. 
@@ -267,11 +279,13 @@ class OutputsScopingBandPass(_Outputs): def __init__(self, op: Operator): super().__init__(scoping_band_pass._spec().outputs, op) - self._scoping = Output(scoping_band_pass._spec().output_pin(0), 0, op) + self._scoping: Output[Scoping] = Output( + scoping_band_pass._spec().output_pin(0), 0, op + ) self._outputs.append(self._scoping) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/scoping_high_pass.py b/src/ansys/dpf/core/operators/filter/scoping_high_pass.py index 087f6e4693c..55193c6c739 100644 --- a/src/ansys/dpf/core/operators/filter/scoping_high_pass.py +++ b/src/ansys/dpf/core/operators/filter/scoping_high_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class scoping_high_pass(Operator): r"""The high pass filter returns all the values above (but not equal to) the @@ -173,15 +179,21 @@ class InputsScopingHighPass(_Inputs): def __init__(self, op: Operator): super().__init__(scoping_high_pass._spec().inputs, op) - self._field = Input(scoping_high_pass._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + scoping_high_pass._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._threshold = Input(scoping_high_pass._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + scoping_high_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) - self._both = Input(scoping_high_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + scoping_high_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -202,7 +214,7 @@ def field(self) -> Input: return self._field @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. A threshold scalar or a field containing one value is expected. @@ -223,7 +235,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
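The threshold pins of `scoping_band_pass` (and of the high/low/signed-high-pass variants below) are annotated `Input[float | Field]`, mirroring the docstring's "a threshold scalar or a field containing one value is expected". A hedged sketch of the scalar form; `my_field` is assumed to be an existing `ansys.dpf.core` `Field`, for instance one pulled from a result operator:

```python
# Sketch: my_field is a pre-existing Field (its creation is out of scope here).
from ansys.dpf.core import operators as ops

band = ops.filter.scoping_band_pass()
band.inputs.field.connect(my_field)      # Input[Field | FieldsContainer]
band.inputs.min_threshold.connect(0.2)   # a float is accepted...
band.inputs.max_threshold.connect(0.8)   # ...and so would be a one-value Field

ids = band.outputs.scoping()             # Output[Scoping]: entities inside the band
```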
@@ -258,11 +270,13 @@ class OutputsScopingHighPass(_Outputs): def __init__(self, op: Operator): super().__init__(scoping_high_pass._spec().outputs, op) - self._scoping = Output(scoping_high_pass._spec().output_pin(0), 0, op) + self._scoping: Output[Scoping] = Output( + scoping_high_pass._spec().output_pin(0), 0, op + ) self._outputs.append(self._scoping) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/scoping_low_pass.py b/src/ansys/dpf/core/operators/filter/scoping_low_pass.py index 190bc8d79ac..2033ba102f2 100644 --- a/src/ansys/dpf/core/operators/filter/scoping_low_pass.py +++ b/src/ansys/dpf/core/operators/filter/scoping_low_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class scoping_low_pass(Operator): r"""The low pass filter returns all the values below (but not equal to) the @@ -173,15 +179,21 @@ class InputsScopingLowPass(_Inputs): def __init__(self, op: Operator): super().__init__(scoping_low_pass._spec().inputs, op) - self._field = Input(scoping_low_pass._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + scoping_low_pass._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._threshold = Input(scoping_low_pass._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + scoping_low_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) - self._both = Input(scoping_low_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + scoping_low_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -202,7 +214,7 @@ def field(self) -> Input: return self._field @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. a threshold scalar or a field containing one value is expected @@ -223,7 +235,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -258,11 +270,13 @@ class OutputsScopingLowPass(_Outputs): def __init__(self, op: Operator): super().__init__(scoping_low_pass._spec().outputs, op) - self._scoping = Output(scoping_low_pass._spec().output_pin(0), 0, op) + self._scoping: Output[Scoping] = Output( + scoping_low_pass._spec().output_pin(0), 0, op + ) self._outputs.append(self._scoping) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/scoping_signed_high_pass.py b/src/ansys/dpf/core/operators/filter/scoping_signed_high_pass.py index e4f7e30e4bb..e6682216f50 100644 --- a/src/ansys/dpf/core/operators/filter/scoping_signed_high_pass.py +++ b/src/ansys/dpf/core/operators/filter/scoping_signed_high_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class scoping_signed_high_pass(Operator): r"""The high pass filter returns all the values above, or equal, in absolute @@ -177,17 +183,21 @@ class InputsScopingSignedHighPass(_Inputs): def __init__(self, op: Operator): super().__init__(scoping_signed_high_pass._spec().inputs, op) - self._field = Input(scoping_signed_high_pass._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + scoping_signed_high_pass._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._threshold = Input( + self._threshold: Input[float | Field] = Input( scoping_signed_high_pass._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._threshold) - self._both = Input(scoping_signed_high_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + scoping_signed_high_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -208,7 +218,7 @@ def field(self) -> Input: return self._field @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. A threshold scalar or a field containing one value is expected. @@ -229,7 +239,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -264,11 +274,13 @@ class OutputsScopingSignedHighPass(_Outputs): def __init__(self, op: Operator): super().__init__(scoping_signed_high_pass._spec().outputs, op) - self._scoping = Output(scoping_signed_high_pass._spec().output_pin(0), 0, op) + self._scoping: Output[Scoping] = Output( + scoping_signed_high_pass._spec().output_pin(0), 0, op + ) self._outputs.append(self._scoping) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/timefreq_band_pass.py b/src/ansys/dpf/core/operators/filter/timefreq_band_pass.py index 37cf417e1ad..b17d51d4174 100644 --- a/src/ansys/dpf/core/operators/filter/timefreq_band_pass.py +++ b/src/ansys/dpf/core/operators/filter/timefreq_band_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class timefreq_band_pass(Operator): r"""The band pass filter returns all the values above (but not equal to) the @@ -189,17 +195,21 @@ class InputsTimefreqBandPass(_Inputs): def __init__(self, op: Operator): super().__init__(timefreq_band_pass._spec().inputs, op) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( timefreq_band_pass._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_support) - self._min_threshold = Input(timefreq_band_pass._spec().input_pin(1), 1, op, -1) + self._min_threshold: Input[float | Field] = Input( + timefreq_band_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._min_threshold) - self._max_threshold = Input(timefreq_band_pass._spec().input_pin(2), 2, op, -1) + self._max_threshold: Input[float | Field] = Input( + timefreq_band_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._max_threshold) @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Returns @@ -218,7 +228,7 @@ def time_freq_support(self) -> Input: return self._time_freq_support @property - def min_threshold(self) -> Input: + def min_threshold(self) -> Input[float | Field]: r"""Allows to connect min_threshold input to the operator. A minimum threshold scalar or a field containing one value is expected. @@ -239,7 +249,7 @@ def min_threshold(self) -> Input: return self._min_threshold @property - def max_threshold(self) -> Input: + def max_threshold(self) -> Input[float | Field]: r"""Allows to connect max_threshold input to the operator. A maximum threshold scalar or a field containing one value is expected. 
@@ -275,15 +285,17 @@ class OutputsTimefreqBandPass(_Outputs): def __init__(self, op: Operator): super().__init__(timefreq_band_pass._spec().outputs, op) - self._time_freq_support = Output( + self._time_freq_support: Output[TimeFreqSupport] = Output( timefreq_band_pass._spec().output_pin(0), 0, op ) self._outputs.append(self._time_freq_support) - self._scoping = Output(timefreq_band_pass._spec().output_pin(1), 1, op) + self._scoping: Output[Scoping] = Output( + timefreq_band_pass._spec().output_pin(1), 1, op + ) self._outputs.append(self._scoping) @property - def time_freq_support(self) -> Output: + def time_freq_support(self) -> Output[TimeFreqSupport]: r"""Allows to get time_freq_support output of the operator Returns @@ -301,7 +313,7 @@ def time_freq_support(self) -> Output: return self._time_freq_support @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/timefreq_high_pass.py b/src/ansys/dpf/core/operators/filter/timefreq_high_pass.py index d0719f2d14f..744a2e4c335 100644 --- a/src/ansys/dpf/core/operators/filter/timefreq_high_pass.py +++ b/src/ansys/dpf/core/operators/filter/timefreq_high_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class timefreq_high_pass(Operator): r"""The high pass filter returns all the values above (but not equal to) the @@ -187,17 +193,21 @@ class InputsTimefreqHighPass(_Inputs): def __init__(self, op: Operator): super().__init__(timefreq_high_pass._spec().inputs, op) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( timefreq_high_pass._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_support) - self._threshold = Input(timefreq_high_pass._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + timefreq_high_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) - self._both = Input(timefreq_high_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + timefreq_high_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Returns @@ -216,7 +226,7 @@ def time_freq_support(self) -> Input: return self._time_freq_support @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. A threshold scalar or a field containing one value is expected. @@ -237,7 +247,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
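Since the change consists of annotations plus `TYPE_CHECKING`-guarded imports, runtime behaviour is untouched; the gain is purely static. With the `timefreq_band_pass` pins above typed, a checker can report the payload of each pin directly. A checker-oriented sketch (`reveal_type` is understood by mypy/pyright; `typing.reveal_type` also exists at runtime on Python 3.11+), with the expected results summarized in the comments:

```python
# Run mypy or pyright over this snippet to see the inferred pin types.
from typing import reveal_type  # Python 3.11+; checkers handle it regardless

from ansys.dpf.core import operators as ops

op = ops.filter.timefreq_band_pass()
reveal_type(op.inputs.time_freq_support)   # roughly Input[TimeFreqSupport]
reveal_type(op.inputs.min_threshold)       # roughly Input[float | Field]
reveal_type(op.outputs.time_freq_support)  # roughly Output[TimeFreqSupport]
reveal_type(op.outputs.scoping)            # roughly Output[Scoping]
```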
@@ -273,15 +283,17 @@ class OutputsTimefreqHighPass(_Outputs): def __init__(self, op: Operator): super().__init__(timefreq_high_pass._spec().outputs, op) - self._time_freq_support = Output( + self._time_freq_support: Output[TimeFreqSupport] = Output( timefreq_high_pass._spec().output_pin(0), 0, op ) self._outputs.append(self._time_freq_support) - self._scoping = Output(timefreq_high_pass._spec().output_pin(1), 1, op) + self._scoping: Output[Scoping] = Output( + timefreq_high_pass._spec().output_pin(1), 1, op + ) self._outputs.append(self._scoping) @property - def time_freq_support(self) -> Output: + def time_freq_support(self) -> Output[TimeFreqSupport]: r"""Allows to get time_freq_support output of the operator Returns @@ -299,7 +311,7 @@ def time_freq_support(self) -> Output: return self._time_freq_support @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/timefreq_low_pass.py b/src/ansys/dpf/core/operators/filter/timefreq_low_pass.py index 3d3e9566c71..f798e77f921 100644 --- a/src/ansys/dpf/core/operators/filter/timefreq_low_pass.py +++ b/src/ansys/dpf/core/operators/filter/timefreq_low_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class timefreq_low_pass(Operator): r"""The low pass filter returns all the values below (but not equal to) the @@ -187,17 +193,21 @@ class InputsTimefreqLowPass(_Inputs): def __init__(self, op: Operator): super().__init__(timefreq_low_pass._spec().inputs, op) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( timefreq_low_pass._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_support) - self._threshold = Input(timefreq_low_pass._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + timefreq_low_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) - self._both = Input(timefreq_low_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + timefreq_low_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Returns @@ -216,7 +226,7 @@ def time_freq_support(self) -> Input: return self._time_freq_support @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. a threshold scalar or a field containing one value is expected @@ -237,7 +247,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -273,13 +283,17 @@ class OutputsTimefreqLowPass(_Outputs): def __init__(self, op: Operator): super().__init__(timefreq_low_pass._spec().outputs, op) - self._time_freq_support = Output(timefreq_low_pass._spec().output_pin(0), 0, op) + self._time_freq_support: Output[TimeFreqSupport] = Output( + timefreq_low_pass._spec().output_pin(0), 0, op + ) self._outputs.append(self._time_freq_support) - self._scoping = Output(timefreq_low_pass._spec().output_pin(1), 1, op) + self._scoping: Output[Scoping] = Output( + timefreq_low_pass._spec().output_pin(1), 1, op + ) self._outputs.append(self._scoping) @property - def time_freq_support(self) -> Output: + def time_freq_support(self) -> Output[TimeFreqSupport]: r"""Allows to get time_freq_support output of the operator Returns @@ -297,7 +311,7 @@ def time_freq_support(self) -> Output: return self._time_freq_support @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/timefreq_signed_high_pass.py b/src/ansys/dpf/core/operators/filter/timefreq_signed_high_pass.py index c80b9674eea..6f9123e9eac 100644 --- a/src/ansys/dpf/core/operators/filter/timefreq_signed_high_pass.py +++ b/src/ansys/dpf/core/operators/filter/timefreq_signed_high_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class timefreq_signed_high_pass(Operator): r"""The high pass filter returns all the values above, or equal, in absolute @@ -191,19 +197,21 @@ class InputsTimefreqSignedHighPass(_Inputs): def __init__(self, op: Operator): super().__init__(timefreq_signed_high_pass._spec().inputs, op) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( timefreq_signed_high_pass._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_support) - self._threshold = Input( + self._threshold: Input[float | Field] = Input( timefreq_signed_high_pass._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._threshold) - self._both = Input(timefreq_signed_high_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + timefreq_signed_high_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Returns @@ -222,7 +230,7 @@ def time_freq_support(self) -> Input: return self._time_freq_support @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. A threshold scalar or a field containing one value is expected. @@ -243,7 +251,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -279,15 +287,17 @@ class OutputsTimefreqSignedHighPass(_Outputs): def __init__(self, op: Operator): super().__init__(timefreq_signed_high_pass._spec().outputs, op) - self._time_freq_support = Output( + self._time_freq_support: Output[TimeFreqSupport] = Output( timefreq_signed_high_pass._spec().output_pin(0), 0, op ) self._outputs.append(self._time_freq_support) - self._scoping = Output(timefreq_signed_high_pass._spec().output_pin(1), 1, op) + self._scoping: Output[Scoping] = Output( + timefreq_signed_high_pass._spec().output_pin(1), 1, op + ) self._outputs.append(self._scoping) @property - def time_freq_support(self) -> Output: + def time_freq_support(self) -> Output[TimeFreqSupport]: r"""Allows to get time_freq_support output of the operator Returns @@ -305,7 +315,7 @@ def time_freq_support(self) -> Output: return self._time_freq_support @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/timescoping_band_pass.py b/src/ansys/dpf/core/operators/filter/timescoping_band_pass.py index 406e1044150..fd9cb16be1b 100644 --- a/src/ansys/dpf/core/operators/filter/timescoping_band_pass.py +++ b/src/ansys/dpf/core/operators/filter/timescoping_band_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class timescoping_band_pass(Operator): r"""The band pass filter returns all the values above (but not equal to) the @@ -185,21 +191,21 @@ class InputsTimescopingBandPass(_Inputs): def __init__(self, op: Operator): super().__init__(timescoping_band_pass._spec().inputs, op) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( timescoping_band_pass._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_support) - self._min_threshold = Input( + self._min_threshold: Input[float | Field] = Input( timescoping_band_pass._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._min_threshold) - self._max_threshold = Input( + self._max_threshold: Input[float | Field] = Input( timescoping_band_pass._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._max_threshold) @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Returns @@ -218,7 +224,7 @@ def time_freq_support(self) -> Input: return self._time_freq_support @property - def min_threshold(self) -> Input: + def min_threshold(self) -> Input[float | Field]: r"""Allows to connect min_threshold input to the operator. A minimum threshold scalar or a field containing one value is expected. @@ -239,7 +245,7 @@ def min_threshold(self) -> Input: return self._min_threshold @property - def max_threshold(self) -> Input: + def max_threshold(self) -> Input[float | Field]: r"""Allows to connect max_threshold input to the operator. A maximum threshold scalar or a field containing one value is expected. 
@@ -274,11 +280,13 @@ class OutputsTimescopingBandPass(_Outputs): def __init__(self, op: Operator): super().__init__(timescoping_band_pass._spec().outputs, op) - self._scoping = Output(timescoping_band_pass._spec().output_pin(0), 0, op) + self._scoping: Output[Scoping] = Output( + timescoping_band_pass._spec().output_pin(0), 0, op + ) self._outputs.append(self._scoping) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/timescoping_high_pass.py b/src/ansys/dpf/core/operators/filter/timescoping_high_pass.py index 9b430c62330..59efd9fc5d8 100644 --- a/src/ansys/dpf/core/operators/filter/timescoping_high_pass.py +++ b/src/ansys/dpf/core/operators/filter/timescoping_high_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class timescoping_high_pass(Operator): r"""The high pass filter returns all the values above (but not equal to) the @@ -183,17 +189,21 @@ class InputsTimescopingHighPass(_Inputs): def __init__(self, op: Operator): super().__init__(timescoping_high_pass._spec().inputs, op) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( timescoping_high_pass._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_support) - self._threshold = Input(timescoping_high_pass._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + timescoping_high_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) - self._both = Input(timescoping_high_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + timescoping_high_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Returns @@ -212,7 +222,7 @@ def time_freq_support(self) -> Input: return self._time_freq_support @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. A threshold scalar or a field containing one value is expected. @@ -233,7 +243,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
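The `timescoping_*` family takes the same typed inputs as `timefreq_*` but, as the annotations above make explicit, hands back only an `Output[Scoping]` (no filtered `TimeFreqSupport`). A usage sketch with a placeholder result file and an assumed running server:

```python
# Sketch: "model.rst" is a placeholder path; a reachable DPF server is assumed.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

model = dpf.Model("model.rst")

ts = ops.filter.timescoping_band_pass()
ts.inputs.time_freq_support.connect(model.metadata.time_freq_support)  # Input[TimeFreqSupport]
ts.inputs.min_threshold.connect(0.0)   # Input[float | Field]
ts.inputs.max_threshold.connect(0.01)

time_ids = ts.outputs.scoping()        # Output[Scoping]: the matching time/freq set IDs
```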
@@ -268,11 +278,13 @@ class OutputsTimescopingHighPass(_Outputs): def __init__(self, op: Operator): super().__init__(timescoping_high_pass._spec().outputs, op) - self._scoping = Output(timescoping_high_pass._spec().output_pin(0), 0, op) + self._scoping: Output[Scoping] = Output( + timescoping_high_pass._spec().output_pin(0), 0, op + ) self._outputs.append(self._scoping) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/timescoping_low_pass.py b/src/ansys/dpf/core/operators/filter/timescoping_low_pass.py index ce9eabe05ad..cc7f24cba54 100644 --- a/src/ansys/dpf/core/operators/filter/timescoping_low_pass.py +++ b/src/ansys/dpf/core/operators/filter/timescoping_low_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class timescoping_low_pass(Operator): r"""The low pass filter returns all the values below (but not equal to) the @@ -183,17 +189,21 @@ class InputsTimescopingLowPass(_Inputs): def __init__(self, op: Operator): super().__init__(timescoping_low_pass._spec().inputs, op) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( timescoping_low_pass._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_support) - self._threshold = Input(timescoping_low_pass._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float | Field] = Input( + timescoping_low_pass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._threshold) - self._both = Input(timescoping_low_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + timescoping_low_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Returns @@ -212,7 +222,7 @@ def time_freq_support(self) -> Input: return self._time_freq_support @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. a threshold scalar or a field containing one value is expected @@ -233,7 +243,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -268,11 +278,13 @@ class OutputsTimescopingLowPass(_Outputs): def __init__(self, op: Operator): super().__init__(timescoping_low_pass._spec().outputs, op) - self._scoping = Output(timescoping_low_pass._spec().output_pin(0), 0, op) + self._scoping: Output[Scoping] = Output( + timescoping_low_pass._spec().output_pin(0), 0, op + ) self._outputs.append(self._scoping) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/filter/timescoping_signed_high_pass.py b/src/ansys/dpf/core/operators/filter/timescoping_signed_high_pass.py index f6d44956d84..c3350b6d2af 100644 --- a/src/ansys/dpf/core/operators/filter/timescoping_signed_high_pass.py +++ b/src/ansys/dpf/core/operators/filter/timescoping_signed_high_pass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class timescoping_signed_high_pass(Operator): r"""The high pass filter returns all the values above, or equal, in absolute @@ -183,19 +189,21 @@ class InputsTimescopingSignedHighPass(_Inputs): def __init__(self, op: Operator): super().__init__(timescoping_signed_high_pass._spec().inputs, op) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( timescoping_signed_high_pass._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_support) - self._threshold = Input( + self._threshold: Input[float | Field] = Input( timescoping_signed_high_pass._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._threshold) - self._both = Input(timescoping_signed_high_pass._spec().input_pin(2), 2, op, -1) + self._both: Input[bool] = Input( + timescoping_signed_high_pass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._both) @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Returns @@ -214,7 +222,7 @@ def time_freq_support(self) -> Input: return self._time_freq_support @property - def threshold(self) -> Input: + def threshold(self) -> Input[float | Field]: r"""Allows to connect threshold input to the operator. A threshold scalar or a field containing one value is expected. @@ -235,7 +243,7 @@ def threshold(self) -> Input: return self._threshold @property - def both(self) -> Input: + def both(self) -> Input[bool]: r"""Allows to connect both input to the operator. The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1. 
@@ -270,13 +278,13 @@ class OutputsTimescopingSignedHighPass(_Outputs): def __init__(self, op: Operator): super().__init__(timescoping_signed_high_pass._spec().outputs, op) - self._scoping = Output( + self._scoping: Output[Scoping] = Output( timescoping_signed_high_pass._spec().output_pin(0), 0, op ) self._outputs.append(self._scoping) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/cartesian_to_spherical.py b/src/ansys/dpf/core/operators/geo/cartesian_to_spherical.py index 5a967eb8130..017e0664291 100644 --- a/src/ansys/dpf/core/operators/geo/cartesian_to_spherical.py +++ b/src/ansys/dpf/core/operators/geo/cartesian_to_spherical.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class cartesian_to_spherical(Operator): r"""Converts 3D field from cartesian coordinates to spherical coordinates. @@ -140,11 +145,13 @@ class InputsCartesianToSpherical(_Inputs): def __init__(self, op: Operator): super().__init__(cartesian_to_spherical._spec().inputs, op) - self._field = Input(cartesian_to_spherical._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + cartesian_to_spherical._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns @@ -177,11 +184,13 @@ class OutputsCartesianToSpherical(_Outputs): def __init__(self, op: Operator): super().__init__(cartesian_to_spherical._spec().outputs, op) - self._field = Output(cartesian_to_spherical._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + cartesian_to_spherical._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/cartesian_to_spherical_fc.py b/src/ansys/dpf/core/operators/geo/cartesian_to_spherical_fc.py index 9334caa6aa0..8ff33ed59d3 100644 --- a/src/ansys/dpf/core/operators/geo/cartesian_to_spherical_fc.py +++ b/src/ansys/dpf/core/operators/geo/cartesian_to_spherical_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class cartesian_to_spherical_fc(Operator): r"""Converts 3D field from cartesian coordinates to spherical coordinates. 
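All of these generated modules follow the same import pattern: concrete DPF types are imported only under `typing.TYPE_CHECKING`, and `from __future__ import annotations` means annotations are never evaluated at runtime, so the new unions (including the PEP 604 `X | Y` spelling on interpreters older than 3.10) add no import cost and cannot create import cycles. A small self-contained illustration of that pattern, deliberately independent of the DPF code base (all names below are illustrative only):

```python
# Minimal illustration of the TYPE_CHECKING + future-annotations pattern.
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers; never imported at runtime, so it cannot
    # introduce a cycle or slow down module import.
    from decimal import Decimal  # stand-in for a heavy or cyclic dependency


def total_due(amount: float | Decimal) -> float:
    """Coerce an amount to float.

    'Decimal' appears only in the annotation, which stays unevaluated at
    runtime thanks to the __future__ import, so this module imports cleanly
    even though 'Decimal' is never actually imported when the code runs.
    """
    return float(amount)


print(total_due(2.5))
```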
@@ -140,13 +144,13 @@ class InputsCartesianToSphericalFc(_Inputs): def __init__(self, op: Operator): super().__init__(cartesian_to_spherical_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( cartesian_to_spherical_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -179,13 +183,13 @@ class OutputsCartesianToSphericalFc(_Outputs): def __init__(self, op: Operator): super().__init__(cartesian_to_spherical_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( cartesian_to_spherical_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/element_nodal_contribution.py b/src/ansys/dpf/core/operators/geo/element_nodal_contribution.py index 4c6f85ed060..c2097ea28b2 100644 --- a/src/ansys/dpf/core/operators/geo/element_nodal_contribution.py +++ b/src/ansys/dpf/core/operators/geo/element_nodal_contribution.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class element_nodal_contribution(Operator): r"""Compute the fraction of the element measure attributed to each node of @@ -184,19 +190,21 @@ class InputsElementNodalContribution(_Inputs): def __init__(self, op: Operator): super().__init__(element_nodal_contribution._spec().inputs, op) - self._mesh = Input(element_nodal_contribution._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + element_nodal_contribution._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) - self._scoping = Input( + self._scoping: Input[Scoping] = Input( element_nodal_contribution._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._scoping) - self._volume_fraction = Input( + self._volume_fraction: Input[bool] = Input( element_nodal_contribution._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._volume_fraction) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -215,7 +223,7 @@ def mesh(self) -> Input: return self._mesh @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Integrate the input field over a specific scoping. @@ -236,7 +244,7 @@ def scoping(self) -> Input: return self._scoping @property - def volume_fraction(self) -> Input: + def volume_fraction(self) -> Input[bool]: r"""Allows to connect volume_fraction input to the operator. If true, returns influence volume, area or length. If false, the values are normalized with the element volume, area or length. Default: true. 
@@ -271,11 +279,13 @@ class OutputsElementNodalContribution(_Outputs): def __init__(self, op: Operator): super().__init__(element_nodal_contribution._spec().outputs, op) - self._field = Output(element_nodal_contribution._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + element_nodal_contribution._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py b/src/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py index cbd1c9f934e..49cb4766472 100644 --- a/src/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py +++ b/src/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class elements_facets_surfaces_over_time(Operator): r"""Calculates for a mesh, the surface of each element’s facet over time for @@ -186,21 +192,21 @@ class InputsElementsFacetsSurfacesOverTime(_Inputs): def __init__(self, op: Operator): super().__init__(elements_facets_surfaces_over_time._spec().inputs, op) - self._scoping = Input( + self._scoping: Input[Scoping] = Input( elements_facets_surfaces_over_time._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._scoping) - self._displacement = Input( + self._displacement: Input[FieldsContainer] = Input( elements_facets_surfaces_over_time._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._displacement) - self._mesh = Input( + self._mesh: Input[MeshedRegion] = Input( elements_facets_surfaces_over_time._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Returns @@ -219,7 +225,7 @@ def scoping(self) -> Input: return self._scoping @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer]: r"""Allows to connect displacement input to the operator. Displacement field's container. @@ -240,7 +246,7 @@ def displacement(self) -> Input: return self._displacement @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement. 
@@ -276,17 +282,17 @@ class OutputsElementsFacetsSurfacesOverTime(_Outputs): def __init__(self, op: Operator): super().__init__(elements_facets_surfaces_over_time._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elements_facets_surfaces_over_time._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) - self._mesh = Output( + self._mesh: Output[MeshedRegion] = Output( elements_facets_surfaces_over_time._spec().output_pin(1), 1, op ) self._outputs.append(self._mesh) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Surfaces field. @@ -306,7 +312,7 @@ def fields_container(self) -> Output: return self._fields_container @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Mesh made of surface elements only. diff --git a/src/ansys/dpf/core/operators/geo/elements_volume.py b/src/ansys/dpf/core/operators/geo/elements_volume.py index 831d59f531f..b751aa3d248 100644 --- a/src/ansys/dpf/core/operators/geo/elements_volume.py +++ b/src/ansys/dpf/core/operators/geo/elements_volume.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class elements_volume(Operator): r"""Compute the measure of the Elements (volume for 3D elements, surface for @@ -159,13 +165,17 @@ class InputsElementsVolume(_Inputs): def __init__(self, op: Operator): super().__init__(elements_volume._spec().inputs, op) - self._mesh = Input(elements_volume._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + elements_volume._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) - self._mesh_scoping = Input(elements_volume._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + elements_volume._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -184,7 +194,7 @@ def mesh(self) -> Input: return self._mesh @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. If not provided, the measure of all elements for the mesh is computed. If provided, the Scoping needs to have "Elemental" location. 
@@ -219,11 +229,13 @@ class OutputsElementsVolume(_Outputs): def __init__(self, op: Operator): super().__init__(elements_volume._spec().outputs, op) - self._field = Output(elements_volume._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + elements_volume._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/elements_volumes_over_time.py b/src/ansys/dpf/core/operators/geo/elements_volumes_over_time.py index 03674f3d5e1..4d951804c91 100644 --- a/src/ansys/dpf/core/operators/geo/elements_volumes_over_time.py +++ b/src/ansys/dpf/core/operators/geo/elements_volumes_over_time.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class elements_volumes_over_time(Operator): r"""Calculates for a mesh, the volume of each element over time for each @@ -174,19 +180,21 @@ class InputsElementsVolumesOverTime(_Inputs): def __init__(self, op: Operator): super().__init__(elements_volumes_over_time._spec().inputs, op) - self._scoping = Input( + self._scoping: Input[Scoping] = Input( elements_volumes_over_time._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._scoping) - self._displacement = Input( + self._displacement: Input[FieldsContainer] = Input( elements_volumes_over_time._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._displacement) - self._mesh = Input(elements_volumes_over_time._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + elements_volumes_over_time._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Returns @@ -205,7 +213,7 @@ def scoping(self) -> Input: return self._scoping @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer]: r"""Allows to connect displacement input to the operator. Displacement field's container. Must contain the mesh if mesh not specified in input. @@ -226,7 +234,7 @@ def displacement(self) -> Input: return self._displacement @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement. 
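On the geo side the same treatment gives, for example, `elements_volume` a `MeshedRegion`/`Scoping` input pair and an `Output[Field]` result. A short end-to-end sketch, again with a placeholder result file and an assumed server:

```python
# Sketch: "model.rst" is a placeholder; requires a reachable DPF server.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

model = dpf.Model("model.rst")

vol_op = ops.geo.elements_volume()
vol_op.inputs.mesh.connect(model.metadata.meshed_region)  # Input[MeshedRegion]
# mesh_scoping (Input[Scoping]) is optional: without it, every element is measured.

volumes = vol_op.outputs.field()  # Output[Field]: one measure per element
print(volumes)
```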
@@ -261,13 +269,13 @@ class OutputsElementsVolumesOverTime(_Outputs): def __init__(self, op: Operator): super().__init__(elements_volumes_over_time._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elements_volumes_over_time._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/faces_area.py b/src/ansys/dpf/core/operators/geo/faces_area.py index 8dbe2b21832..19079ec5468 100644 --- a/src/ansys/dpf/core/operators/geo/faces_area.py +++ b/src/ansys/dpf/core/operators/geo/faces_area.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class faces_area(Operator): r"""Compute the measure of the Faces (surface for 2D faces of a 3D model or @@ -159,13 +165,17 @@ class InputsFacesArea(_Inputs): def __init__(self, op: Operator): super().__init__(faces_area._spec().inputs, op) - self._mesh = Input(faces_area._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + faces_area._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) - self._mesh_scoping = Input(faces_area._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + faces_area._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -184,7 +194,7 @@ def mesh(self) -> Input: return self._mesh @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. If not provided, the measure of all Faces in the mesh is computed. If provided, the Scoping needs to have "Faces" location. 
@@ -219,11 +229,11 @@ class OutputsFacesArea(_Outputs): def __init__(self, op: Operator): super().__init__(faces_area._spec().outputs, op) - self._field = Output(faces_area._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(faces_area._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/gauss_to_node.py b/src/ansys/dpf/core/operators/geo/gauss_to_node.py index 436f2d32a72..a5b33dd1119 100644 --- a/src/ansys/dpf/core/operators/geo/gauss_to_node.py +++ b/src/ansys/dpf/core/operators/geo/gauss_to_node.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class gauss_to_node(Operator): r"""Extrapolating results available at Gauss or quadrature points to nodal @@ -176,15 +182,19 @@ class InputsGaussToNode(_Inputs): def __init__(self, op: Operator): super().__init__(gauss_to_node._spec().inputs, op) - self._field = Input(gauss_to_node._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input(gauss_to_node._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._scoping = Input(gauss_to_node._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + gauss_to_node._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._mesh = Input(gauss_to_node._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + gauss_to_node._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -203,7 +213,7 @@ def field(self) -> Input: return self._field @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Scoping to integrate on, if not provided, the one from input field is provided. @@ -224,7 +234,7 @@ def scoping(self) -> Input: return self._scoping @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh to integrate on. 
@@ -259,11 +269,11 @@ class OutputsGaussToNode(_Outputs): def __init__(self, op: Operator): super().__init__(gauss_to_node._spec().outputs, op) - self._field = Output(gauss_to_node._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(gauss_to_node._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/integrate_over_elements.py b/src/ansys/dpf/core/operators/geo/integrate_over_elements.py index 15cb8db82a4..8973e8dacc5 100644 --- a/src/ansys/dpf/core/operators/geo/integrate_over_elements.py +++ b/src/ansys/dpf/core/operators/geo/integrate_over_elements.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class integrate_over_elements(Operator): r"""Integration of an input field over mesh. @@ -170,15 +176,21 @@ class InputsIntegrateOverElements(_Inputs): def __init__(self, op: Operator): super().__init__(integrate_over_elements._spec().inputs, op) - self._field = Input(integrate_over_elements._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input( + integrate_over_elements._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._scoping = Input(integrate_over_elements._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + integrate_over_elements._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._mesh = Input(integrate_over_elements._spec().input_pin(2), 2, op, -1) + self._mesh: Input[MeshedRegion] = Input( + integrate_over_elements._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._mesh) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -197,7 +209,7 @@ def field(self) -> Input: return self._field @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Integrate the input field over a specific scoping. @@ -218,7 +230,7 @@ def scoping(self) -> Input: return self._scoping @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh to integrate on. If not provided, the one from input field is employed. 
@@ -253,11 +265,13 @@ class OutputsIntegrateOverElements(_Outputs): def __init__(self, op: Operator): super().__init__(integrate_over_elements._spec().outputs, op) - self._field = Output(integrate_over_elements._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + integrate_over_elements._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/normals.py b/src/ansys/dpf/core/operators/geo/normals.py index 2319a751336..22a2bfbf721 100644 --- a/src/ansys/dpf/core/operators/geo/normals.py +++ b/src/ansys/dpf/core/operators/geo/normals.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class normals(Operator): r"""compute the normals at the given nodes or element scoping based on the @@ -174,15 +180,17 @@ class InputsNormals(_Inputs): def __init__(self, op: Operator): super().__init__(normals._spec().inputs, op) - self._mesh = Input(normals._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input(normals._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) - self._mesh_scoping = Input(normals._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + normals._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._field = Input(normals._spec().input_pin(3), 3, op, -1) + self._field: Input[Field] = Input(normals._spec().input_pin(3), 3, op, -1) self._inputs.append(self._field) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -201,7 +209,7 @@ def mesh(self) -> Input: return self._mesh @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -220,7 +228,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. 
Returns @@ -253,11 +261,11 @@ class OutputsNormals(_Outputs): def __init__(self, op: Operator): super().__init__(normals._spec().outputs, op) - self._field = Output(normals._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(normals._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/normals_provider_nl.py b/src/ansys/dpf/core/operators/geo/normals_provider_nl.py index 6df29fa6e72..11d9a1de1e6 100644 --- a/src/ansys/dpf/core/operators/geo/normals_provider_nl.py +++ b/src/ansys/dpf/core/operators/geo/normals_provider_nl.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class normals_provider_nl(Operator): r"""Computes the normals on nodes/faces/elements based on integration points @@ -180,17 +186,21 @@ class InputsNormalsProviderNl(_Inputs): def __init__(self, op: Operator): super().__init__(normals_provider_nl._spec().inputs, op) - self._mesh = Input(normals_provider_nl._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + normals_provider_nl._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) - self._mesh_scoping = Input(normals_provider_nl._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + normals_provider_nl._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._requested_location = Input( + self._requested_location: Input[str] = Input( normals_provider_nl._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Skin, face, or shell mesh region. @@ -211,7 +221,7 @@ def mesh(self) -> Input: return self._mesh @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Elemental, ElementalNodal, or Nodal scoping. Location derived from this. @@ -232,7 +242,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. If no scoping, specifies location. If scoping is Elemental or ElementalNodal this overrides scoping. Default is Elemental. 
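Scalar pins benefit as well: `requested_location` on `normals_provider_nl` is now `Input[str]`, so location strings are checked statically. A small sketch, with placeholder objects and assuming a reachable DPF server:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    op = ops.geo.normals_provider_nl()
    op.inputs.mesh.connect(dpf.MeshedRegion())     # Input[MeshedRegion], placeholder mesh
    op.inputs.requested_location.connect("Nodal")  # Input[str]
    # op.inputs.requested_location.connect(9)      # an int would now be flagged
    normals = op.outputs.field                     # Output[Field]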
@@ -267,11 +277,13 @@ class OutputsNormalsProviderNl(_Outputs): def __init__(self, op: Operator): super().__init__(normals_provider_nl._spec().outputs, op) - self._field = Output(normals_provider_nl._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + normals_provider_nl._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/rotate.py b/src/ansys/dpf/core/operators/geo/rotate.py index 80a3591629f..dd2cb4c9acf 100644 --- a/src/ansys/dpf/core/operators/geo/rotate.py +++ b/src/ansys/dpf/core/operators/geo/rotate.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class rotate(Operator): r"""Applies a transformation (rotation) matrix on a field. @@ -158,13 +163,17 @@ class InputsRotate(_Inputs): def __init__(self, op: Operator): super().__init__(rotate._spec().inputs, op) - self._field = Input(rotate._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + rotate._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._field_rotation_matrix = Input(rotate._spec().input_pin(1), 1, op, -1) + self._field_rotation_matrix: Input[Field] = Input( + rotate._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._field_rotation_matrix) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -185,7 +194,7 @@ def field(self) -> Input: return self._field @property - def field_rotation_matrix(self) -> Input: + def field_rotation_matrix(self) -> Input[Field]: r"""Allows to connect field_rotation_matrix input to the operator. 
3-3 rotation matrix @@ -220,11 +229,11 @@ class OutputsRotate(_Outputs): def __init__(self, op: Operator): super().__init__(rotate._spec().outputs, op) - self._field = Output(rotate._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(rotate._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/rotate_fc.py b/src/ansys/dpf/core/operators/geo/rotate_fc.py index 550991d5395..3289d0f0dcb 100644 --- a/src/ansys/dpf/core/operators/geo/rotate_fc.py +++ b/src/ansys/dpf/core/operators/geo/rotate_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class rotate_fc(Operator): r"""Apply a transformation (rotation) matrix on all the fields of a fields @@ -159,13 +164,17 @@ class InputsRotateFc(_Inputs): def __init__(self, op: Operator): super().__init__(rotate_fc._spec().inputs, op) - self._fields_container = Input(rotate_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + rotate_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._coordinate_system = Input(rotate_fc._spec().input_pin(1), 1, op, -1) + self._coordinate_system: Input[Field] = Input( + rotate_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._coordinate_system) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -184,7 +193,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def coordinate_system(self) -> Input: + def coordinate_system(self) -> Input[Field]: r"""Allows to connect coordinate_system input to the operator. 
3-3 rotation matrix @@ -219,11 +228,13 @@ class OutputsRotateFc(_Outputs): def __init__(self, op: Operator): super().__init__(rotate_fc._spec().outputs, op) - self._fields_container = Output(rotate_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + rotate_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py b/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py index 7f39a53fe21..c1523d82087 100644 --- a/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py +++ b/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class rotate_in_cylindrical_cs(Operator): r"""Rotates a field to its corresponding values into the specified @@ -179,17 +185,21 @@ class InputsRotateInCylindricalCs(_Inputs): def __init__(self, op: Operator): super().__init__(rotate_in_cylindrical_cs._spec().inputs, op) - self._field = Input(rotate_in_cylindrical_cs._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + rotate_in_cylindrical_cs._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._coordinate_system = Input( + self._coordinate_system: Input[Field] = Input( rotate_in_cylindrical_cs._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._coordinate_system) - self._mesh = Input(rotate_in_cylindrical_cs._spec().input_pin(2), 2, op, -1) + self._mesh: Input[MeshedRegion] = Input( + rotate_in_cylindrical_cs._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._mesh) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -210,7 +220,7 @@ def field(self) -> Input: return self._field @property - def coordinate_system(self) -> Input: + def coordinate_system(self) -> Input[Field]: r"""Allows to connect coordinate_system input to the operator. 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system. @@ -231,7 +241,7 @@ def coordinate_system(self) -> Input: return self._coordinate_system @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh support of the input field. 
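Union-typed pins keep both historical call styles valid. For `rotate_in_cylindrical_cs`, `field` accepts either a `Field` or a `FieldsContainer`, while `coordinate_system` is restricted to a `Field`. A short sketch with empty placeholders, illustrative only:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    op = ops.geo.rotate_in_cylindrical_cs()
    op.inputs.field.connect(dpf.Field())              # ok: Field
    op.inputs.field.connect(dpf.FieldsContainer())    # ok: FieldsContainer
    op.inputs.coordinate_system.connect(dpf.Field())  # Input[Field] only
    rotated = op.outputs.field                        # Output[Field]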
@@ -266,11 +276,13 @@ class OutputsRotateInCylindricalCs(_Outputs): def __init__(self, op: Operator): super().__init__(rotate_in_cylindrical_cs._spec().outputs, op) - self._field = Output(rotate_in_cylindrical_cs._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + rotate_in_cylindrical_cs._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py b/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py index 011e2b780be..b30efe6d5b2 100644 --- a/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py +++ b/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class rotate_in_cylindrical_cs_fc(Operator): r"""Rotates all the fields of a fields container (not defined with a @@ -186,17 +192,21 @@ class InputsRotateInCylindricalCsFc(_Inputs): def __init__(self, op: Operator): super().__init__(rotate_in_cylindrical_cs_fc._spec().inputs, op) - self._field = Input(rotate_in_cylindrical_cs_fc._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + rotate_in_cylindrical_cs_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._coordinate_system = Input( + self._coordinate_system: Input[Field] = Input( rotate_in_cylindrical_cs_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._coordinate_system) - self._mesh = Input(rotate_in_cylindrical_cs_fc._spec().input_pin(2), 2, op, -1) + self._mesh: Input[MeshedRegion] = Input( + rotate_in_cylindrical_cs_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._mesh) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns @@ -215,7 +225,7 @@ def field(self) -> Input: return self._field @property - def coordinate_system(self) -> Input: + def coordinate_system(self) -> Input[Field]: r"""Allows to connect coordinate_system input to the operator. 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system. @@ -236,7 +246,7 @@ def coordinate_system(self) -> Input: return self._coordinate_system @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh support of the input fields_container, in case it does not have one defined. 
@@ -271,13 +281,13 @@ class OutputsRotateInCylindricalCsFc(_Outputs): def __init__(self, op: Operator): super().__init__(rotate_in_cylindrical_cs_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( rotate_in_cylindrical_cs_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/spherical_to_cartesian.py b/src/ansys/dpf/core/operators/geo/spherical_to_cartesian.py index d13501c2872..eb2464048a1 100644 --- a/src/ansys/dpf/core/operators/geo/spherical_to_cartesian.py +++ b/src/ansys/dpf/core/operators/geo/spherical_to_cartesian.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class spherical_to_cartesian(Operator): r"""Converts 3D field from spherical coordinates to cartesian coordinates. @@ -140,11 +145,13 @@ class InputsSphericalToCartesian(_Inputs): def __init__(self, op: Operator): super().__init__(spherical_to_cartesian._spec().inputs, op) - self._field = Input(spherical_to_cartesian._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + spherical_to_cartesian._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns @@ -177,11 +184,13 @@ class OutputsSphericalToCartesian(_Outputs): def __init__(self, op: Operator): super().__init__(spherical_to_cartesian._spec().outputs, op) - self._field = Output(spherical_to_cartesian._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + spherical_to_cartesian._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/spherical_to_cartesian_fc.py b/src/ansys/dpf/core/operators/geo/spherical_to_cartesian_fc.py index 3a9437a0e9c..31f8dea3c76 100644 --- a/src/ansys/dpf/core/operators/geo/spherical_to_cartesian_fc.py +++ b/src/ansys/dpf/core/operators/geo/spherical_to_cartesian_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class spherical_to_cartesian_fc(Operator): r"""Converts 3D field from spherical coordinates to cartesian coordinates. 
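Across all of these generated modules, the new imports sit behind `if TYPE_CHECKING:` and the files already use `from __future__ import annotations`, so the annotations stay strings at runtime and the `Field`/`FieldsContainer` modules are only imported by static analyzers. A generic sketch of that pattern follows; the helper function is hypothetical and not part of the patch.

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:  # evaluated by mypy/pyright, skipped at runtime
        from ansys.dpf.core.fields_container import FieldsContainer


    def count_fields(fc: FieldsContainer) -> int:
        """Hypothetical helper: the annotation resolves for checkers without a runtime import."""
        return len(fc)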
@@ -140,13 +144,13 @@ class InputsSphericalToCartesianFc(_Inputs): def __init__(self, op: Operator): super().__init__(spherical_to_cartesian_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( spherical_to_cartesian_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -179,13 +183,13 @@ class OutputsSphericalToCartesianFc(_Outputs): def __init__(self, op: Operator): super().__init__(spherical_to_cartesian_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( spherical_to_cartesian_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/geo/to_polar_coordinates.py b/src/ansys/dpf/core/operators/geo/to_polar_coordinates.py index 4d9b338e5b7..864d0d95a36 100644 --- a/src/ansys/dpf/core/operators/geo/to_polar_coordinates.py +++ b/src/ansys/dpf/core/operators/geo/to_polar_coordinates.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class to_polar_coordinates(Operator): r"""Finds r, theta (rad), and z coordinates of a coordinates (nodal) field @@ -160,15 +165,17 @@ class InputsToPolarCoordinates(_Inputs): def __init__(self, op: Operator): super().__init__(to_polar_coordinates._spec().inputs, op) - self._field = Input(to_polar_coordinates._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + to_polar_coordinates._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._coordinate_system = Input( + self._coordinate_system: Input[Field] = Input( to_polar_coordinates._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._coordinate_system) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -189,7 +196,7 @@ def field(self) -> Input: return self._field @property - def coordinate_system(self) -> Input: + def coordinate_system(self) -> Input[Field]: r"""Allows to connect coordinate_system input to the operator. 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system. By default, the rotation axis is the z axis and the origin is [0,0,0]. 
@@ -224,11 +231,13 @@ class OutputsToPolarCoordinates(_Outputs): def __init__(self, op: Operator): super().__init__(to_polar_coordinates._spec().outputs, op) - self._field = Output(to_polar_coordinates._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + to_polar_coordinates._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/info/markdown_latex_example.py b/src/ansys/dpf/core/operators/info/markdown_latex_example.py index 8883471f600..3d56c3b6c8e 100644 --- a/src/ansys/dpf/core/operators/info/markdown_latex_example.py +++ b/src/ansys/dpf/core/operators/info/markdown_latex_example.py @@ -713,13 +713,13 @@ class InputsMarkdownLatexExample(_Inputs): def __init__(self, op: Operator): super().__init__(markdown_latex_example._spec().inputs, op) - self._bogus_input = Input( + self._bogus_input: Input[str] = Input( markdown_latex_example._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._bogus_input) @property - def bogus_input(self) -> Input: + def bogus_input(self) -> Input[str]: r"""Allows to connect bogus_input input to the operator. This pin showcases the use of Markdown and LaTeX in pin descriptions: @@ -840,7 +840,9 @@ class OutputsMarkdownLatexExample(_Outputs): def __init__(self, op: Operator): super().__init__(markdown_latex_example._spec().outputs, op) - self._bogus_output = Output(markdown_latex_example._spec().output_pin(0), 0, op) + self._bogus_output: Output = Output( + markdown_latex_example._spec().output_pin(0), 0, op + ) self._outputs.append(self._bogus_output) @property diff --git a/src/ansys/dpf/core/operators/invariant/convertnum_bcs_to_nod.py b/src/ansys/dpf/core/operators/invariant/convertnum_bcs_to_nod.py index b4701d18f00..5503060e6dc 100644 --- a/src/ansys/dpf/core/operators/invariant/convertnum_bcs_to_nod.py +++ b/src/ansys/dpf/core/operators/invariant/convertnum_bcs_to_nod.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + class convertnum_bcs_to_nod(Operator): r"""Converts a fields container from BCS to NOD ordering. @@ -158,17 +163,17 @@ class InputsConvertnumBcsToNod(_Inputs): def __init__(self, op: Operator): super().__init__(convertnum_bcs_to_nod._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( convertnum_bcs_to_nod._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( convertnum_bcs_to_nod._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. fields_container @@ -189,7 +194,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Data_sources (must contain the full file). 
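`convertnum_bcs_to_nod` gains typed `FieldsContainer` and `DataSources` pins. The sketch below is illustrative only; the file path is a placeholder for a real MAPDL full file and the container would normally come from a result operator.

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    ds = dpf.DataSources(r"path/to/model.full")    # placeholder path
    op = ops.invariant.convertnum_bcs_to_nod()
    op.inputs.fields_container.connect(dpf.FieldsContainer())  # Input[FieldsContainer]
    op.inputs.data_sources.connect(ds)                         # Input[DataSources]
    reordered = op.outputs.fields_container                    # Output[FieldsContainer]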
@@ -224,13 +229,13 @@ class OutputsConvertnumBcsToNod(_Outputs): def __init__(self, op: Operator): super().__init__(convertnum_bcs_to_nod._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( convertnum_bcs_to_nod._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/invariant/convertnum_nod_to_bcs.py b/src/ansys/dpf/core/operators/invariant/convertnum_nod_to_bcs.py index 59b654e5c1d..e49f823afde 100644 --- a/src/ansys/dpf/core/operators/invariant/convertnum_nod_to_bcs.py +++ b/src/ansys/dpf/core/operators/invariant/convertnum_nod_to_bcs.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + class convertnum_nod_to_bcs(Operator): r"""Converts a fields container from NOD to BCS ordering. @@ -158,17 +163,17 @@ class InputsConvertnumNodToBcs(_Inputs): def __init__(self, op: Operator): super().__init__(convertnum_nod_to_bcs._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( convertnum_nod_to_bcs._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( convertnum_nod_to_bcs._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. fields_container @@ -189,7 +194,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Data_sources (must contain the full file). 
@@ -224,13 +229,13 @@ class OutputsConvertnumNodToBcs(_Outputs): def __init__(self, op: Operator): super().__init__(convertnum_nod_to_bcs._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( convertnum_nod_to_bcs._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/invariant/convertnum_op.py b/src/ansys/dpf/core/operators/invariant/convertnum_op.py index 861dedf39e2..ba32fd79cf6 100644 --- a/src/ansys/dpf/core/operators/invariant/convertnum_op.py +++ b/src/ansys/dpf/core/operators/invariant/convertnum_op.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + class convertnum_op(Operator): r"""Converts a fields container from one mapdl ordering to another mapdl @@ -196,17 +201,25 @@ class InputsConvertnumOp(_Inputs): def __init__(self, op: Operator): super().__init__(convertnum_op._spec().inputs, op) - self._input_ordering = Input(convertnum_op._spec().input_pin(0), 0, op, -1) + self._input_ordering: Input[int] = Input( + convertnum_op._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._input_ordering) - self._output_ordering = Input(convertnum_op._spec().input_pin(1), 1, op, -1) + self._output_ordering: Input[int] = Input( + convertnum_op._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._output_ordering) - self._fields_container = Input(convertnum_op._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + convertnum_op._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._data_sources = Input(convertnum_op._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + convertnum_op._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def input_ordering(self) -> Input: + def input_ordering(self) -> Input[int]: r"""Allows to connect input_ordering input to the operator. Input ordering number @@ -227,7 +240,7 @@ def input_ordering(self) -> Input: return self._input_ordering @property - def output_ordering(self) -> Input: + def output_ordering(self) -> Input[int]: r"""Allows to connect output_ordering input to the operator. Output ordering number @@ -248,7 +261,7 @@ def output_ordering(self) -> Input: return self._output_ordering @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Expect fields container @@ -269,7 +282,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Data_sources (must contain the full file). 
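`convertnum_op` adds `Input[int]` on both ordering pins. In the sketch below the ordering values are placeholders chosen only for illustration, since the pin documentation just calls them ordering numbers; the file path is likewise a placeholder.

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    op = ops.invariant.convertnum_op()
    op.inputs.input_ordering.connect(0)     # Input[int]; 0 is a placeholder value
    op.inputs.output_ordering.connect(1)    # Input[int]; 1 is a placeholder value
    op.inputs.fields_container.connect(dpf.FieldsContainer())
    op.inputs.data_sources.connect(dpf.DataSources(r"path/to/model.full"))
    # op.inputs.input_ordering.connect("nod")  # a str would now be reported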
@@ -304,11 +317,13 @@ class OutputsConvertnumOp(_Outputs): def __init__(self, op: Operator): super().__init__(convertnum_op._spec().outputs, op) - self._fields_container = Output(convertnum_op._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + convertnum_op._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/invariant/eigen_values.py b/src/ansys/dpf/core/operators/invariant/eigen_values.py index ad56ea0e827..cf33eca85a6 100644 --- a/src/ansys/dpf/core/operators/invariant/eigen_values.py +++ b/src/ansys/dpf/core/operators/invariant/eigen_values.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class eigen_values(Operator): r"""Computes the element-wise Eigen values of a tensor field. @@ -141,11 +146,13 @@ class InputsEigenValues(_Inputs): def __init__(self, op: Operator): super().__init__(eigen_values._spec().inputs, op) - self._field = Input(eigen_values._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + eigen_values._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. 
field or fields container with only one field is expected @@ -180,11 +187,11 @@ class OutputsEigenValues(_Outputs): def __init__(self, op: Operator): super().__init__(eigen_values._spec().outputs, op) - self._field = Output(eigen_values._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(eigen_values._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/invariant/eigen_values_fc.py b/src/ansys/dpf/core/operators/invariant/eigen_values_fc.py index ec0ab9d3e57..715d523746c 100644 --- a/src/ansys/dpf/core/operators/invariant/eigen_values_fc.py +++ b/src/ansys/dpf/core/operators/invariant/eigen_values_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class eigen_values_fc(Operator): r"""Computes the element-wise Eigen values of all the tensor fields of a @@ -142,11 +146,13 @@ class InputsEigenValuesFc(_Inputs): def __init__(self, op: Operator): super().__init__(eigen_values_fc._spec().inputs, op) - self._fields_container = Input(eigen_values_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + eigen_values_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -179,11 +185,13 @@ class OutputsEigenValuesFc(_Outputs): def __init__(self, op: Operator): super().__init__(eigen_values_fc._spec().outputs, op) - self._fields_container = Output(eigen_values_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + eigen_values_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/invariant/eigen_vectors.py b/src/ansys/dpf/core/operators/invariant/eigen_vectors.py index a2d3aa3fe16..4e9f9361afa 100644 --- a/src/ansys/dpf/core/operators/invariant/eigen_vectors.py +++ b/src/ansys/dpf/core/operators/invariant/eigen_vectors.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class eigen_vectors(Operator): r"""Computes the element-wise Eigen vectors for each tensor in the field. 
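The `_fc` variants consistently take and return a `FieldsContainer`. A minimal sketch for `eigen_values_fc`, using an empty placeholder container and assuming a reachable DPF server:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    op = ops.invariant.eigen_values_fc()
    op.inputs.fields_container.connect(dpf.FieldsContainer())  # Input[FieldsContainer]
    eig = op.outputs.fields_container                          # Output[FieldsContainer]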
@@ -141,11 +146,13 @@ class InputsEigenVectors(_Inputs): def __init__(self, op: Operator): super().__init__(eigen_vectors._spec().inputs, op) - self._field = Input(eigen_vectors._spec().input_pin(0), 0, op, -1) + self._field: Input[FieldsContainer | Field] = Input( + eigen_vectors._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[FieldsContainer | Field]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -180,11 +187,11 @@ class OutputsEigenVectors(_Outputs): def __init__(self, op: Operator): super().__init__(eigen_vectors._spec().outputs, op) - self._field = Output(eigen_vectors._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(eigen_vectors._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py b/src/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py index 08998471414..47c1f2f2053 100644 --- a/src/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py +++ b/src/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class eigen_vectors_fc(Operator): r"""Computes the element-wise Eigen vectors for each tensor in the fields of @@ -142,11 +147,13 @@ class InputsEigenVectorsFc(_Inputs): def __init__(self, op: Operator): super().__init__(eigen_vectors_fc._spec().inputs, op) - self._fields_container = Input(eigen_vectors_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer | Field] = Input( + eigen_vectors_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer | Field]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -179,11 +186,13 @@ class OutputsEigenVectorsFc(_Outputs): def __init__(self, op: Operator): super().__init__(eigen_vectors_fc._spec().outputs, op) - self._fields_container = Output(eigen_vectors_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + eigen_vectors_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/invariant/invariants.py b/src/ansys/dpf/core/operators/invariant/invariants.py index 5d600961fe2..94f90ad9169 100644 --- a/src/ansys/dpf/core/operators/invariant/invariants.py +++ b/src/ansys/dpf/core/operators/invariant/invariants.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class invariants(Operator): r"""Computes the element-wise invariants of a tensor field. @@ -159,11 +163,11 @@ class InputsInvariants(_Inputs): def __init__(self, op: Operator): super().__init__(invariants._spec().inputs, op) - self._field = Input(invariants._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input(invariants._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -198,15 +202,17 @@ class OutputsInvariants(_Outputs): def __init__(self, op: Operator): super().__init__(invariants._spec().outputs, op) - self._field_int = Output(invariants._spec().output_pin(0), 0, op) + self._field_int: Output[Field] = Output(invariants._spec().output_pin(0), 0, op) self._outputs.append(self._field_int) - self._field_eqv = Output(invariants._spec().output_pin(1), 1, op) + self._field_eqv: Output[Field] = Output(invariants._spec().output_pin(1), 1, op) self._outputs.append(self._field_eqv) - self._field_max_shear = Output(invariants._spec().output_pin(2), 2, op) + self._field_max_shear: Output[Field] = Output( + invariants._spec().output_pin(2), 2, op + ) self._outputs.append(self._field_max_shear) @property - def field_int(self) -> Output: + def field_int(self) -> Output[Field]: r"""Allows to get field_int output of the operator stress intensity field @@ -226,7 +232,7 @@ def field_int(self) -> Output: return self._field_int @property - def field_eqv(self) -> Output: + def field_eqv(self) -> Output[Field]: r"""Allows to get field_eqv output of the operator stress equivalent intensity @@ -246,7 +252,7 @@ def field_eqv(self) -> Output: return self._field_eqv @property - def field_max_shear(self) -> Output: + def field_max_shear(self) -> Output[Field]: r"""Allows to get field_max_shear output of the operator max shear stress field diff --git a/src/ansys/dpf/core/operators/invariant/invariants_fc.py b/src/ansys/dpf/core/operators/invariant/invariants_fc.py index 054df7fe625..a285ef5c8c0 100644 --- a/src/ansys/dpf/core/operators/invariant/invariants_fc.py +++ b/src/ansys/dpf/core/operators/invariant/invariants_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ 
-14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class invariants_fc(Operator): r"""Computes the element-wise invariants of all the tensor fields of a @@ -161,11 +165,13 @@ class InputsInvariantsFc(_Inputs): def __init__(self, op: Operator): super().__init__(invariants_fc._spec().inputs, op) - self._fields_container = Input(invariants_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + invariants_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -200,15 +206,21 @@ class OutputsInvariantsFc(_Outputs): def __init__(self, op: Operator): super().__init__(invariants_fc._spec().outputs, op) - self._fields_int = Output(invariants_fc._spec().output_pin(0), 0, op) + self._fields_int: Output[FieldsContainer] = Output( + invariants_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_int) - self._fields_eqv = Output(invariants_fc._spec().output_pin(1), 1, op) + self._fields_eqv: Output[FieldsContainer] = Output( + invariants_fc._spec().output_pin(1), 1, op + ) self._outputs.append(self._fields_eqv) - self._fields_max_shear = Output(invariants_fc._spec().output_pin(2), 2, op) + self._fields_max_shear: Output[FieldsContainer] = Output( + invariants_fc._spec().output_pin(2), 2, op + ) self._outputs.append(self._fields_max_shear) @property - def fields_int(self) -> Output: + def fields_int(self) -> Output[FieldsContainer]: r"""Allows to get fields_int output of the operator stress intensity field @@ -228,7 +240,7 @@ def fields_int(self) -> Output: return self._fields_int @property - def fields_eqv(self) -> Output: + def fields_eqv(self) -> Output[FieldsContainer]: r"""Allows to get fields_eqv output of the operator stress equivalent intensity @@ -248,7 +260,7 @@ def fields_eqv(self) -> Output: return self._fields_eqv @property - def fields_max_shear(self) -> Output: + def fields_max_shear(self) -> Output[FieldsContainer]: r"""Allows to get fields_max_shear output of the operator max shear stress field diff --git a/src/ansys/dpf/core/operators/invariant/principal_invariants.py b/src/ansys/dpf/core/operators/invariant/principal_invariants.py index ce52c40b4b4..6d5f62efc85 100644 --- a/src/ansys/dpf/core/operators/invariant/principal_invariants.py +++ b/src/ansys/dpf/core/operators/invariant/principal_invariants.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class principal_invariants(Operator): r"""Computes the element-wise Eigen values of a tensor field. 
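Operators with several result pins, such as `invariants_fc`, now advertise the type of each output separately. An illustrative sketch with a placeholder container:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    op = ops.invariant.invariants_fc()
    op.inputs.fields_container.connect(dpf.FieldsContainer())  # placeholder tensor fields
    intensity = op.outputs.fields_int          # Output[FieldsContainer]
    equivalent = op.outputs.fields_eqv         # Output[FieldsContainer]
    max_shear = op.outputs.fields_max_shear    # Output[FieldsContainer]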
@@ -159,11 +163,13 @@ class InputsPrincipalInvariants(_Inputs): def __init__(self, op: Operator): super().__init__(principal_invariants._spec().inputs, op) - self._field = Input(principal_invariants._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input( + principal_invariants._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -198,15 +204,21 @@ class OutputsPrincipalInvariants(_Outputs): def __init__(self, op: Operator): super().__init__(principal_invariants._spec().outputs, op) - self._field_eig_1 = Output(principal_invariants._spec().output_pin(0), 0, op) + self._field_eig_1: Output[Field] = Output( + principal_invariants._spec().output_pin(0), 0, op + ) self._outputs.append(self._field_eig_1) - self._field_eig_2 = Output(principal_invariants._spec().output_pin(1), 1, op) + self._field_eig_2: Output[Field] = Output( + principal_invariants._spec().output_pin(1), 1, op + ) self._outputs.append(self._field_eig_2) - self._field_eig_3 = Output(principal_invariants._spec().output_pin(2), 2, op) + self._field_eig_3: Output[Field] = Output( + principal_invariants._spec().output_pin(2), 2, op + ) self._outputs.append(self._field_eig_3) @property - def field_eig_1(self) -> Output: + def field_eig_1(self) -> Output[Field]: r"""Allows to get field_eig_1 output of the operator first eigen value field @@ -226,7 +238,7 @@ def field_eig_1(self) -> Output: return self._field_eig_1 @property - def field_eig_2(self) -> Output: + def field_eig_2(self) -> Output[Field]: r"""Allows to get field_eig_2 output of the operator second eigen value field @@ -246,7 +258,7 @@ def field_eig_2(self) -> Output: return self._field_eig_2 @property - def field_eig_3(self) -> Output: + def field_eig_3(self) -> Output[Field]: r"""Allows to get field_eig_3 output of the operator third eigen value field diff --git a/src/ansys/dpf/core/operators/invariant/principal_invariants_fc.py b/src/ansys/dpf/core/operators/invariant/principal_invariants_fc.py index ed18412e210..1338989865f 100644 --- a/src/ansys/dpf/core/operators/invariant/principal_invariants_fc.py +++ b/src/ansys/dpf/core/operators/invariant/principal_invariants_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class principal_invariants_fc(Operator): r"""Computes the element-wise Eigen values of all the tensor fields of a @@ -161,13 +165,13 @@ class InputsPrincipalInvariantsFc(_Inputs): def __init__(self, op: Operator): super().__init__(principal_invariants_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( principal_invariants_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -202,21 +206,21 @@ class OutputsPrincipalInvariantsFc(_Outputs): def __init__(self, op: Operator): super().__init__(principal_invariants_fc._spec().outputs, op) - self._fields_eig_1 = Output( + self._fields_eig_1: Output[FieldsContainer] = Output( principal_invariants_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_eig_1) - self._fields_eig_2 = Output( + self._fields_eig_2: Output[FieldsContainer] = Output( principal_invariants_fc._spec().output_pin(1), 1, op ) self._outputs.append(self._fields_eig_2) - self._fields_eig_3 = Output( + self._fields_eig_3: Output[FieldsContainer] = Output( principal_invariants_fc._spec().output_pin(2), 2, op ) self._outputs.append(self._fields_eig_3) @property - def fields_eig_1(self) -> Output: + def fields_eig_1(self) -> Output[FieldsContainer]: r"""Allows to get fields_eig_1 output of the operator first eigen value fields @@ -236,7 +240,7 @@ def fields_eig_1(self) -> Output: return self._fields_eig_1 @property - def fields_eig_2(self) -> Output: + def fields_eig_2(self) -> Output[FieldsContainer]: r"""Allows to get fields_eig_2 output of the operator second eigen value fields @@ -256,7 +260,7 @@ def fields_eig_2(self) -> Output: return self._fields_eig_2 @property - def fields_eig_3(self) -> Output: + def fields_eig_3(self) -> Output[FieldsContainer]: r"""Allows to get fields_eig_3 output of the operator third eigen value fields diff --git a/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv.py b/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv.py index 018ac03c92b..35080eca2a6 100644 --- a/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv.py +++ b/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class segalman_von_mises_eqv(Operator): r"""Computes the element-wise Segalman Von-Mises criteria on a tensor field. @@ -141,11 +146,13 @@ class InputsSegalmanVonMisesEqv(_Inputs): def __init__(self, op: Operator): super().__init__(segalman_von_mises_eqv._spec().inputs, op) - self._field = Input(segalman_von_mises_eqv._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + segalman_von_mises_eqv._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. 
field or fields container with only one field is expected @@ -180,11 +187,13 @@ class OutputsSegalmanVonMisesEqv(_Outputs): def __init__(self, op: Operator): super().__init__(segalman_von_mises_eqv._spec().outputs, op) - self._field = Output(segalman_von_mises_eqv._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + segalman_von_mises_eqv._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv_fc.py b/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv_fc.py index 38316479c62..6cc5ee8fe51 100644 --- a/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv_fc.py +++ b/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class segalman_von_mises_eqv_fc(Operator): r"""Computes the element-wise Segalman Von-Mises criteria on all the tensor @@ -142,13 +146,13 @@ class InputsSegalmanVonMisesEqvFc(_Inputs): def __init__(self, op: Operator): super().__init__(segalman_von_mises_eqv_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( segalman_von_mises_eqv_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -181,13 +185,13 @@ class OutputsSegalmanVonMisesEqvFc(_Outputs): def __init__(self, op: Operator): super().__init__(segalman_von_mises_eqv_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( segalman_von_mises_eqv_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/invariant/von_mises_eqv.py b/src/ansys/dpf/core/operators/invariant/von_mises_eqv.py index 7e22baf6c65..c020285c7c4 100644 --- a/src/ansys/dpf/core/operators/invariant/von_mises_eqv.py +++ b/src/ansys/dpf/core/operators/invariant/von_mises_eqv.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class von_mises_eqv(Operator): r"""Computes the element-wise Von-Mises criteria on a tensor field. 
@@ -156,13 +161,17 @@ class InputsVonMisesEqv(_Inputs): def __init__(self, op: Operator): super().__init__(von_mises_eqv._spec().inputs, op) - self._field = Input(von_mises_eqv._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + von_mises_eqv._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._poisson_ratio = Input(von_mises_eqv._spec().input_pin(13), 13, op, -1) + self._poisson_ratio: Input[float | int] = Input( + von_mises_eqv._spec().input_pin(13), 13, op, -1 + ) self._inputs.append(self._poisson_ratio) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -183,7 +192,7 @@ def field(self) -> Input: return self._field @property - def poisson_ratio(self) -> Input: + def poisson_ratio(self) -> Input[float | int]: r"""Allows to connect poisson_ratio input to the operator. Poisson ratio to be used in equivalent strain calculation. @@ -218,11 +227,11 @@ class OutputsVonMisesEqv(_Outputs): def __init__(self, op: Operator): super().__init__(von_mises_eqv._spec().outputs, op) - self._field = Output(von_mises_eqv._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(von_mises_eqv._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py b/src/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py index 8eaf11eb174..4b207d47bcf 100644 --- a/src/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py +++ b/src/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class von_mises_eqv_fc(Operator): r"""Computes the element-wise Von-Mises criteria on all the tensor fields of @@ -159,13 +163,17 @@ class InputsVonMisesEqvFc(_Inputs): def __init__(self, op: Operator): super().__init__(von_mises_eqv_fc._spec().inputs, op) - self._fields_container = Input(von_mises_eqv_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + von_mises_eqv_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._poisson_ratio = Input(von_mises_eqv_fc._spec().input_pin(13), 13, op, -1) + self._poisson_ratio: Input[float | int] = Input( + von_mises_eqv_fc._spec().input_pin(13), 13, op, -1 + ) self._inputs.append(self._poisson_ratio) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -184,7 +192,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def poisson_ratio(self) -> Input: + def poisson_ratio(self) -> Input[float | int]: r"""Allows to connect poisson_ratio input to the operator. Poisson ratio to be used in equivalent strain calculation. 
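`poisson_ratio` on the Von-Mises operators is typed `Input[float | int]`, so both literal forms pass a checker while strings are rejected. A sketch for `von_mises_eqv_fc`, with a placeholder container and an illustrative ratio value:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    op = ops.invariant.von_mises_eqv_fc()
    op.inputs.fields_container.connect(dpf.FieldsContainer())  # Input[FieldsContainer]
    op.inputs.poisson_ratio.connect(0.3)      # float accepted
    op.inputs.poisson_ratio.connect(0)        # int accepted as well
    # op.inputs.poisson_ratio.connect("0.3")  # a str would now be flagged
    vm = op.outputs.fields_container          # Output[FieldsContainer]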
@@ -219,11 +227,13 @@ class OutputsVonMisesEqvFc(_Outputs): def __init__(self, op: Operator): super().__init__(von_mises_eqv_fc._spec().outputs, op) - self._fields_container = Output(von_mises_eqv_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + von_mises_eqv_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/ascending_sort.py b/src/ansys/dpf/core/operators/logic/ascending_sort.py index 1f5fc2a34bc..c80a5e7766d 100644 --- a/src/ansys/dpf/core/operators/logic/ascending_sort.py +++ b/src/ansys/dpf/core/operators/logic/ascending_sort.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class ascending_sort(Operator): r"""Sort a field (in 0) in ascending order with an optional component @@ -182,17 +187,21 @@ class InputsAscendingSort(_Inputs): def __init__(self, op: Operator): super().__init__(ascending_sort._spec().inputs, op) - self._field = Input(ascending_sort._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + ascending_sort._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._component_priority_table = Input( + self._component_priority_table: Input = Input( ascending_sort._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._component_priority_table) - self._sort_by_scoping = Input(ascending_sort._spec().input_pin(2), 2, op, -1) + self._sort_by_scoping: Input[bool] = Input( + ascending_sort._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._sort_by_scoping) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -234,7 +243,7 @@ def component_priority_table(self) -> Input: return self._component_priority_table @property - def sort_by_scoping(self) -> Input: + def sort_by_scoping(self) -> Input[bool]: r"""Allows to connect sort_by_scoping input to the operator. 
if true, uses scoping to sort the field (default is false) @@ -269,11 +278,11 @@ class OutputsAscendingSort(_Outputs): def __init__(self, op: Operator): super().__init__(ascending_sort._spec().outputs, op) - self._field = Output(ascending_sort._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(ascending_sort._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/ascending_sort_fc.py b/src/ansys/dpf/core/operators/logic/ascending_sort_fc.py index caa6e78c5a6..75048e2b736 100644 --- a/src/ansys/dpf/core/operators/logic/ascending_sort_fc.py +++ b/src/ansys/dpf/core/operators/logic/ascending_sort_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class ascending_sort_fc(Operator): r"""Sort a field (in 0) in ascending order with an optional component @@ -182,19 +186,21 @@ class InputsAscendingSortFc(_Inputs): def __init__(self, op: Operator): super().__init__(ascending_sort_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( ascending_sort_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._component_priority_table = Input( + self._component_priority_table: Input = Input( ascending_sort_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._component_priority_table) - self._sort_by_scoping = Input(ascending_sort_fc._spec().input_pin(2), 2, op, -1) + self._sort_by_scoping: Input[bool] = Input( + ascending_sort_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._sort_by_scoping) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -236,7 +242,7 @@ def component_priority_table(self) -> Input: return self._component_priority_table @property - def sort_by_scoping(self) -> Input: + def sort_by_scoping(self) -> Input[bool]: r"""Allows to connect sort_by_scoping input to the operator. 
if true, uses scoping to sort the field (default is false) @@ -271,11 +277,13 @@ class OutputsAscendingSortFc(_Outputs): def __init__(self, op: Operator): super().__init__(ascending_sort_fc._spec().outputs, op) - self._fields_container = Output(ascending_sort_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + ascending_sort_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/component_selector.py b/src/ansys/dpf/core/operators/logic/component_selector.py index fce488b701b..17363a3c186 100644 --- a/src/ansys/dpf/core/operators/logic/component_selector.py +++ b/src/ansys/dpf/core/operators/logic/component_selector.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class component_selector(Operator): r"""Creates a scalar/vector field based on the selected component. @@ -177,17 +182,21 @@ class InputsComponentSelector(_Inputs): def __init__(self, op: Operator): super().__init__(component_selector._spec().inputs, op) - self._field = Input(component_selector._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + component_selector._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._component_number = Input( + self._component_number: Input[int] = Input( component_selector._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._component_number) - self._default_value = Input(component_selector._spec().input_pin(2), 2, op, -1) + self._default_value: Input[float] = Input( + component_selector._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._default_value) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns @@ -206,7 +215,7 @@ def field(self) -> Input: return self._field @property - def component_number(self) -> Input: + def component_number(self) -> Input[int]: r"""Allows to connect component_number input to the operator. One or several component index that will be extracted from the initial field. @@ -227,7 +236,7 @@ def component_number(self) -> Input: return self._component_number @property - def default_value(self) -> Input: + def default_value(self) -> Input[float]: r"""Allows to connect default_value input to the operator. Set a default value for components that do not exist. 
@@ -262,11 +271,13 @@ class OutputsComponentSelector(_Outputs): def __init__(self, op: Operator): super().__init__(component_selector._spec().outputs, op) - self._field = Output(component_selector._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + component_selector._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/component_selector_fc.py b/src/ansys/dpf/core/operators/logic/component_selector_fc.py index 1227a064c9b..209d39ee737 100644 --- a/src/ansys/dpf/core/operators/logic/component_selector_fc.py +++ b/src/ansys/dpf/core/operators/logic/component_selector_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class component_selector_fc(Operator): r"""Creates a scalar fields container based on the selected component for @@ -159,17 +164,17 @@ class InputsComponentSelectorFc(_Inputs): def __init__(self, op: Operator): super().__init__(component_selector_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer | Field] = Input( component_selector_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._component_number = Input( + self._component_number: Input[int] = Input( component_selector_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._component_number) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer | Field]: r"""Allows to connect fields_container input to the operator. Returns @@ -188,7 +193,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def component_number(self) -> Input: + def component_number(self) -> Input[int]: r"""Allows to connect component_number input to the operator. one or several component index that will be extracted from the initial field. 
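As a hedged usage sketch for the `component_selector_fc` pins annotated above, where `stress_fc` is a hypothetical `FieldsContainer` obtained elsewhere:

    from ansys.dpf import core as dpf

    # `stress_fc` is assumed to be an existing FieldsContainer of vector or tensor fields.
    op = dpf.operators.logic.component_selector_fc()
    op.inputs.fields_container.connect(stress_fc)     # Input[FieldsContainer | Field]
    op.inputs.component_number.connect(0)             # Input[int]: index of the component to extract
    first_component = op.outputs.fields_container()   # Output[FieldsContainer]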
@@ -223,13 +228,13 @@ class OutputsComponentSelectorFc(_Outputs): def __init__(self, op: Operator): super().__init__(component_selector_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( component_selector_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/component_transformer.py b/src/ansys/dpf/core/operators/logic/component_transformer.py index 02c987a750f..178db401654 100644 --- a/src/ansys/dpf/core/operators/logic/component_transformer.py +++ b/src/ansys/dpf/core/operators/logic/component_transformer.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class component_transformer(Operator): r"""Takes the input field and creates a field with overriden value on given @@ -179,19 +184,21 @@ class InputsComponentTransformer(_Inputs): def __init__(self, op: Operator): super().__init__(component_transformer._spec().inputs, op) - self._field = Input(component_transformer._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + component_transformer._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._component_number = Input( + self._component_number: Input[int] = Input( component_transformer._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._component_number) - self._default_value = Input( + self._default_value: Input[float] = Input( component_transformer._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._default_value) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns @@ -210,7 +217,7 @@ def field(self) -> Input: return self._field @property - def component_number(self) -> Input: + def component_number(self) -> Input[int]: r"""Allows to connect component_number input to the operator. One or several component index that will be modified from the initial field. @@ -231,7 +238,7 @@ def component_number(self) -> Input: return self._component_number @property - def default_value(self) -> Input: + def default_value(self) -> Input[float]: r"""Allows to connect default_value input to the operator. Set a default value for components selected. 
@@ -266,11 +273,13 @@ class OutputsComponentTransformer(_Outputs): def __init__(self, op: Operator): super().__init__(component_transformer._spec().outputs, op) - self._field = Output(component_transformer._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + component_transformer._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/component_transformer_fc.py b/src/ansys/dpf/core/operators/logic/component_transformer_fc.py index 6f4bd0c5e3d..2fe21c741bd 100644 --- a/src/ansys/dpf/core/operators/logic/component_transformer_fc.py +++ b/src/ansys/dpf/core/operators/logic/component_transformer_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class component_transformer_fc(Operator): r"""Takes the input field and creates a field with overriden value on given @@ -179,21 +183,21 @@ class InputsComponentTransformerFc(_Inputs): def __init__(self, op: Operator): super().__init__(component_transformer_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( component_transformer_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._component_number = Input( + self._component_number: Input[int] = Input( component_transformer_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._component_number) - self._default_value = Input( + self._default_value: Input[float] = Input( component_transformer_fc._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._default_value) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -212,7 +216,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def component_number(self) -> Input: + def component_number(self) -> Input[int]: r"""Allows to connect component_number input to the operator. One or several component index that will be modified from the initial field. @@ -233,7 +237,7 @@ def component_number(self) -> Input: return self._component_number @property - def default_value(self) -> Input: + def default_value(self) -> Input[float]: r"""Allows to connect default_value input to the operator. Set a default value for components selected. 
@@ -268,13 +272,13 @@ class OutputsComponentTransformerFc(_Outputs): def __init__(self, op: Operator): super().__init__(component_transformer_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( component_transformer_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/descending_sort.py b/src/ansys/dpf/core/operators/logic/descending_sort.py index dfd8e570e69..3ce1b1796fa 100644 --- a/src/ansys/dpf/core/operators/logic/descending_sort.py +++ b/src/ansys/dpf/core/operators/logic/descending_sort.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class descending_sort(Operator): r"""Sort a field (in 0) in descending order, with an optional component @@ -182,17 +187,21 @@ class InputsDescendingSort(_Inputs): def __init__(self, op: Operator): super().__init__(descending_sort._spec().inputs, op) - self._field = Input(descending_sort._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + descending_sort._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._component_priority_table = Input( + self._component_priority_table: Input = Input( descending_sort._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._component_priority_table) - self._sort_by_scoping = Input(descending_sort._spec().input_pin(2), 2, op, -1) + self._sort_by_scoping: Input[bool] = Input( + descending_sort._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._sort_by_scoping) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -234,7 +243,7 @@ def component_priority_table(self) -> Input: return self._component_priority_table @property - def sort_by_scoping(self) -> Input: + def sort_by_scoping(self) -> Input[bool]: r"""Allows to connect sort_by_scoping input to the operator. 
if true, uses scoping to sort the field (default is false) @@ -269,11 +278,13 @@ class OutputsDescendingSort(_Outputs): def __init__(self, op: Operator): super().__init__(descending_sort._spec().outputs, op) - self._field = Output(descending_sort._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + descending_sort._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/descending_sort_fc.py b/src/ansys/dpf/core/operators/logic/descending_sort_fc.py index 2eb3bc5d8b5..3b02b305d55 100644 --- a/src/ansys/dpf/core/operators/logic/descending_sort_fc.py +++ b/src/ansys/dpf/core/operators/logic/descending_sort_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class descending_sort_fc(Operator): r"""Sort a field (in 0) in descending order, with an optional component @@ -182,21 +186,21 @@ class InputsDescendingSortFc(_Inputs): def __init__(self, op: Operator): super().__init__(descending_sort_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( descending_sort_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._component_priority_table = Input( + self._component_priority_table: Input = Input( descending_sort_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._component_priority_table) - self._sort_by_scoping = Input( + self._sort_by_scoping: Input[bool] = Input( descending_sort_fc._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._sort_by_scoping) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -238,7 +242,7 @@ def component_priority_table(self) -> Input: return self._component_priority_table @property - def sort_by_scoping(self) -> Input: + def sort_by_scoping(self) -> Input[bool]: r"""Allows to connect sort_by_scoping input to the operator. 
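One practical effect of the `Input[bool]` annotation on pins such as `sort_by_scoping` is that a static type checker can flag obviously wrong connections, assuming the generic parameter also annotates `Input.connect`. A small sketch, with `my_fc` as a hypothetical `FieldsContainer`:

    from ansys.dpf import core as dpf

    op = dpf.operators.logic.descending_sort_fc()
    op.inputs.fields_container.connect(my_fc)  # `my_fc` is an assumed FieldsContainer
    op.inputs.sort_by_scoping.connect(True)    # fine: the pin is Input[bool]
    # op.inputs.sort_by_scoping.connect("yes") # a checker such as mypy can report this mismatch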
if true, uses scoping to sort the field (default is false) @@ -273,11 +277,13 @@ class OutputsDescendingSortFc(_Outputs): def __init__(self, op: Operator): super().__init__(descending_sort_fc._spec().outputs, op) - self._fields_container = Output(descending_sort_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + descending_sort_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/elementary_data_selector.py b/src/ansys/dpf/core/operators/logic/elementary_data_selector.py index 84b5679226b..3b8605849f8 100644 --- a/src/ansys/dpf/core/operators/logic/elementary_data_selector.py +++ b/src/ansys/dpf/core/operators/logic/elementary_data_selector.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class elementary_data_selector(Operator): r"""Creates a scalar/vector field based on the selected elementary data. @@ -193,23 +198,25 @@ class InputsElementaryDataSelector(_Inputs): def __init__(self, op: Operator): super().__init__(elementary_data_selector._spec().inputs, op) - self._field = Input(elementary_data_selector._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + elementary_data_selector._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._elementary_data_index = Input( + self._elementary_data_index: Input[int] = Input( elementary_data_selector._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._elementary_data_index) - self._default_value = Input( + self._default_value: Input[float] = Input( elementary_data_selector._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._default_value) - self._elementary_data_index_2 = Input( + self._elementary_data_index_2: Input[int] = Input( elementary_data_selector._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._elementary_data_index_2) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns @@ -228,7 +235,7 @@ def field(self) -> Input: return self._field @property - def elementary_data_index(self) -> Input: + def elementary_data_index(self) -> Input[int]: r"""Allows to connect elementary_data_index input to the operator. One or several elementary data index that will be extracted from the initial field. For field with nature matrix, this is the line indices to extract. @@ -249,7 +256,7 @@ def elementary_data_index(self) -> Input: return self._elementary_data_index @property - def default_value(self) -> Input: + def default_value(self) -> Input[float]: r"""Allows to connect default_value input to the operator. Set a default value for elementary data that do not exist. @@ -270,7 +277,7 @@ def default_value(self) -> Input: return self._default_value @property - def elementary_data_index_2(self) -> Input: + def elementary_data_index_2(self) -> Input[int]: r"""Allows to connect elementary_data_index_2 input to the operator. 
For field with nature matrix, this is the column indices to extract. @@ -305,11 +312,13 @@ class OutputsElementaryDataSelector(_Outputs): def __init__(self, op: Operator): super().__init__(elementary_data_selector._spec().outputs, op) - self._field = Output(elementary_data_selector._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + elementary_data_selector._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/elementary_data_selector_fc.py b/src/ansys/dpf/core/operators/logic/elementary_data_selector_fc.py index a076126bd8a..8d1ba2c664f 100644 --- a/src/ansys/dpf/core/operators/logic/elementary_data_selector_fc.py +++ b/src/ansys/dpf/core/operators/logic/elementary_data_selector_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class elementary_data_selector_fc(Operator): r"""Creates a scalar fields container based on the selected elementary data @@ -183,21 +188,21 @@ class InputsElementaryDataSelectorFc(_Inputs): def __init__(self, op: Operator): super().__init__(elementary_data_selector_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer | Field] = Input( elementary_data_selector_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._elementary_data_index = Input( + self._elementary_data_index: Input[int] = Input( elementary_data_selector_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._elementary_data_index) - self._elementary_data_index_2 = Input( + self._elementary_data_index_2: Input[int] = Input( elementary_data_selector_fc._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._elementary_data_index_2) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer | Field]: r"""Allows to connect fields_container input to the operator. Returns @@ -216,7 +221,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def elementary_data_index(self) -> Input: + def elementary_data_index(self) -> Input[int]: r"""Allows to connect elementary_data_index input to the operator. one or several elementary data indices that will be extracted from the initial field. For a field with a nature matrix, this extracts the line indices. @@ -237,7 +242,7 @@ def elementary_data_index(self) -> Input: return self._elementary_data_index @property - def elementary_data_index_2(self) -> Input: + def elementary_data_index_2(self) -> Input[int]: r"""Allows to connect elementary_data_index_2 input to the operator. For a field with nature matrix, this extracts the column indices. 
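For the `elementary_data_selector` pins annotated above, a minimal sketch; `matrix_field` is a hypothetical `Field` with a matrix nature:

    from ansys.dpf import core as dpf

    # `matrix_field` is assumed to be an existing Field whose nature is matrix.
    op = dpf.operators.logic.elementary_data_selector()
    op.inputs.field.connect(matrix_field)         # Input[Field | FieldsContainer]
    op.inputs.elementary_data_index.connect(0)    # Input[int]: line index to extract
    op.inputs.elementary_data_index_2.connect(1)  # Input[int]: column index to extract
    selected = op.outputs.field()                 # Output[Field]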
@@ -272,13 +277,13 @@ class OutputsElementaryDataSelectorFc(_Outputs): def __init__(self, op: Operator): super().__init__(elementary_data_selector_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elementary_data_selector_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/enrich_materials.py b/src/ansys/dpf/core/operators/logic/enrich_materials.py index 0536888a344..c132d073ea7 100644 --- a/src/ansys/dpf/core/operators/logic/enrich_materials.py +++ b/src/ansys/dpf/core/operators/logic/enrich_materials.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class enrich_materials(Operator): r"""Takes a MaterialContainer and a stream and enriches the @@ -177,13 +182,17 @@ class InputsEnrichMaterials(_Inputs): def __init__(self, op: Operator): super().__init__(enrich_materials._spec().inputs, op) - self._MaterialContainer = Input( + self._MaterialContainer: Input = Input( enrich_materials._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._MaterialContainer) - self._streams = Input(enrich_materials._spec().input_pin(1), 1, op, -1) + self._streams: Input[StreamsContainer | FieldsContainer] = Input( + enrich_materials._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._streams) - self._streams_mapping = Input(enrich_materials._spec().input_pin(2), 2, op, -1) + self._streams_mapping: Input = Input( + enrich_materials._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._streams_mapping) @property @@ -206,7 +215,7 @@ def MaterialContainer(self) -> Input: return self._MaterialContainer @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer | FieldsContainer]: r"""Allows to connect streams input to the operator. 
Returns @@ -258,11 +267,13 @@ class OutputsEnrichMaterials(_Outputs): def __init__(self, op: Operator): super().__init__(enrich_materials._spec().outputs, op) - self._MaterialContainer = Output(enrich_materials._spec().output_pin(0), 0, op) + self._MaterialContainer: Output[bool] = Output( + enrich_materials._spec().output_pin(0), 0, op + ) self._outputs.append(self._MaterialContainer) @property - def MaterialContainer(self) -> Output: + def MaterialContainer(self) -> Output[bool]: r"""Allows to get MaterialContainer output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/identical_anys.py b/src/ansys/dpf/core/operators/logic/identical_anys.py index 3c803ce35a5..6cd487bd2d9 100644 --- a/src/ansys/dpf/core/operators/logic/identical_anys.py +++ b/src/ansys/dpf/core/operators/logic/identical_anys.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.any import Any + class identical_anys(Operator): r"""Takes two Any objects and compares them.Supported types: Field, @@ -227,19 +231,25 @@ class InputsIdenticalAnys(_Inputs): def __init__(self, op: Operator): super().__init__(identical_anys._spec().inputs, op) - self._anyA = Input(identical_anys._spec().input_pin(0), 0, op, -1) + self._anyA: Input[Any] = Input(identical_anys._spec().input_pin(0), 0, op, -1) self._inputs.append(self._anyA) - self._anyB = Input(identical_anys._spec().input_pin(1), 1, op, -1) + self._anyB: Input[Any] = Input(identical_anys._spec().input_pin(1), 1, op, -1) self._inputs.append(self._anyB) - self._double_value = Input(identical_anys._spec().input_pin(2), 2, op, -1) + self._double_value: Input[float] = Input( + identical_anys._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._double_value) - self._double_tolerance = Input(identical_anys._spec().input_pin(3), 3, op, -1) + self._double_tolerance: Input[float] = Input( + identical_anys._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._double_tolerance) - self._compare_auxiliary = Input(identical_anys._spec().input_pin(4), 4, op, -1) + self._compare_auxiliary: Input[bool] = Input( + identical_anys._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._compare_auxiliary) @property - def anyA(self) -> Input: + def anyA(self) -> Input[Any]: r"""Allows to connect anyA input to the operator. Returns @@ -258,7 +268,7 @@ def anyA(self) -> Input: return self._anyA @property - def anyB(self) -> Input: + def anyB(self) -> Input[Any]: r"""Allows to connect anyB input to the operator. Returns @@ -277,7 +287,7 @@ def anyB(self) -> Input: return self._anyB @property - def double_value(self) -> Input: + def double_value(self) -> Input[float]: r"""Allows to connect double_value input to the operator. Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14). @@ -298,7 +308,7 @@ def double_value(self) -> Input: return self._double_value @property - def double_tolerance(self) -> Input: + def double_tolerance(self) -> Input[float]: r"""Allows to connect double_tolerance input to the operator. Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. 
Formula is (v1-v2)/v2 < relativeTol. Default is 0.001. @@ -319,7 +329,7 @@ def double_tolerance(self) -> Input: return self._double_tolerance @property - def compare_auxiliary(self) -> Input: + def compare_auxiliary(self) -> Input[bool]: r"""Allows to connect compare_auxiliary input to the operator. For meshes and meshescontainer: compare auxiliary data (i.e property fields, scopings...). Default value is 'false'. @@ -355,13 +365,15 @@ class OutputsIdenticalAnys(_Outputs): def __init__(self, op: Operator): super().__init__(identical_anys._spec().outputs, op) - self._included = Output(identical_anys._spec().output_pin(0), 0, op) + self._included: Output[bool] = Output( + identical_anys._spec().output_pin(0), 0, op + ) self._outputs.append(self._included) - self._message = Output(identical_anys._spec().output_pin(1), 1, op) + self._message: Output[str] = Output(identical_anys._spec().output_pin(1), 1, op) self._outputs.append(self._message) @property - def included(self) -> Output: + def included(self) -> Output[bool]: r"""Allows to get included output of the operator bool (true if belongs...) @@ -381,7 +393,7 @@ def included(self) -> Output: return self._included @property - def message(self) -> Output: + def message(self) -> Output[str]: r"""Allows to get message output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/identical_fc.py b/src/ansys/dpf/core/operators/logic/identical_fc.py index 92e5ce61a3d..89cc053f568 100644 --- a/src/ansys/dpf/core/operators/logic/identical_fc.py +++ b/src/ansys/dpf/core/operators/logic/identical_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class identical_fc(Operator): r"""Checks if two fields_container are identical. @@ -201,17 +205,25 @@ class InputsIdenticalFc(_Inputs): def __init__(self, op: Operator): super().__init__(identical_fc._spec().inputs, op) - self._fields_containerA = Input(identical_fc._spec().input_pin(0), 0, op, -1) + self._fields_containerA: Input[FieldsContainer] = Input( + identical_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(identical_fc._spec().input_pin(1), 1, op, -1) + self._fields_containerB: Input[FieldsContainer] = Input( + identical_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fields_containerB) - self._small_value = Input(identical_fc._spec().input_pin(2), 2, op, -1) + self._small_value: Input[float] = Input( + identical_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._small_value) - self._tolerance = Input(identical_fc._spec().input_pin(3), 3, op, -1) + self._tolerance: Input[float] = Input( + identical_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._tolerance) @property - def fields_containerA(self) -> Input: + def fields_containerA(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerA input to the operator. Returns @@ -230,7 +242,7 @@ def fields_containerA(self) -> Input: return self._fields_containerA @property - def fields_containerB(self) -> Input: + def fields_containerB(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerB input to the operator. 
Returns @@ -249,7 +261,7 @@ def fields_containerB(self) -> Input: return self._fields_containerB @property - def small_value(self) -> Input: + def small_value(self) -> Input[float]: r"""Allows to connect small_value input to the operator. Double positive small value. Smallest value which will be considered during the comparison step. All the abs(values) in the field less than this value are considered as null, (default value:1.0e-14). @@ -270,7 +282,7 @@ def small_value(self) -> Input: return self._small_value @property - def tolerance(self) -> Input: + def tolerance(self) -> Input[float]: r"""Allows to connect tolerance input to the operator. Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001). @@ -306,13 +318,13 @@ class OutputsIdenticalFc(_Outputs): def __init__(self, op: Operator): super().__init__(identical_fc._spec().outputs, op) - self._boolean = Output(identical_fc._spec().output_pin(0), 0, op) + self._boolean: Output[bool] = Output(identical_fc._spec().output_pin(0), 0, op) self._outputs.append(self._boolean) - self._message = Output(identical_fc._spec().output_pin(1), 1, op) + self._message: Output[str] = Output(identical_fc._spec().output_pin(1), 1, op) self._outputs.append(self._message) @property - def boolean(self) -> Output: + def boolean(self) -> Output[bool]: r"""Allows to get boolean output of the operator bool (true if identical...) @@ -332,7 +344,7 @@ def boolean(self) -> Output: return self._boolean @property - def message(self) -> Output: + def message(self) -> Output[str]: r"""Allows to get message output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/identical_fields.py b/src/ansys/dpf/core/operators/logic/identical_fields.py index 2c2b47f9782..b3fdf8eae3b 100644 --- a/src/ansys/dpf/core/operators/logic/identical_fields.py +++ b/src/ansys/dpf/core/operators/logic/identical_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class identical_fields(Operator): r"""Check if two fields are identical. @@ -201,17 +205,25 @@ class InputsIdenticalFields(_Inputs): def __init__(self, op: Operator): super().__init__(identical_fields._spec().inputs, op) - self._fieldA = Input(identical_fields._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field] = Input( + identical_fields._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(identical_fields._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field] = Input( + identical_fields._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) - self._double_value = Input(identical_fields._spec().input_pin(2), 2, op, -1) + self._double_value: Input[float] = Input( + identical_fields._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._double_value) - self._double_tolerance = Input(identical_fields._spec().input_pin(3), 3, op, -1) + self._double_tolerance: Input[float] = Input( + identical_fields._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._double_tolerance) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field]: r"""Allows to connect fieldA input to the operator. 
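The comparison operators follow the same pattern. A sketch of `identical_fc` with the annotated float pins, where `fc_a` and `fc_b` are hypothetical `FieldsContainer` objects to compare:

    from ansys.dpf import core as dpf

    # `fc_a` and `fc_b` are assumed to be existing FieldsContainer objects.
    op = dpf.operators.logic.identical_fc()
    op.inputs.fields_containerA.connect(fc_a)  # Input[FieldsContainer]
    op.inputs.fields_containerB.connect(fc_b)  # Input[FieldsContainer]
    op.inputs.small_value.connect(1.0e-14)     # Input[float]: values below this are treated as zero
    op.inputs.tolerance.connect(0.001)         # Input[float]: relative tolerance, (v1-v2)/v2
    are_same = op.outputs.boolean()            # Output[bool]
    detail = op.outputs.message()              # Output[str]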
Returns @@ -230,7 +242,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field]: r"""Allows to connect fieldB input to the operator. Returns @@ -249,7 +261,7 @@ def fieldB(self) -> Input: return self._fieldB @property - def double_value(self) -> Input: + def double_value(self) -> Input[float]: r"""Allows to connect double_value input to the operator. Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14). @@ -270,7 +282,7 @@ def double_value(self) -> Input: return self._double_value @property - def double_tolerance(self) -> Input: + def double_tolerance(self) -> Input[float]: r"""Allows to connect double_tolerance input to the operator. Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1 - v2) / v2 < relativeTol. Default is 0.001. @@ -306,13 +318,17 @@ class OutputsIdenticalFields(_Outputs): def __init__(self, op: Operator): super().__init__(identical_fields._spec().outputs, op) - self._boolean = Output(identical_fields._spec().output_pin(0), 0, op) + self._boolean: Output[bool] = Output( + identical_fields._spec().output_pin(0), 0, op + ) self._outputs.append(self._boolean) - self._message = Output(identical_fields._spec().output_pin(1), 1, op) + self._message: Output[str] = Output( + identical_fields._spec().output_pin(1), 1, op + ) self._outputs.append(self._message) @property - def boolean(self) -> Output: + def boolean(self) -> Output[bool]: r"""Allows to get boolean output of the operator bool (true if identical...) @@ -332,7 +348,7 @@ def boolean(self) -> Output: return self._boolean @property - def message(self) -> Output: + def message(self) -> Output[str]: r"""Allows to get message output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/identical_generic_data_containers.py b/src/ansys/dpf/core/operators/logic/identical_generic_data_containers.py index 1d2aacdfb31..eccea320e45 100644 --- a/src/ansys/dpf/core/operators/logic/identical_generic_data_containers.py +++ b/src/ansys/dpf/core/operators/logic/identical_generic_data_containers.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.generic_data_container import GenericDataContainer + class identical_generic_data_containers(Operator): r"""Takes two generic data containers and compares them. 
Supported types: @@ -231,29 +235,29 @@ class InputsIdenticalGenericDataContainers(_Inputs): def __init__(self, op: Operator): super().__init__(identical_generic_data_containers._spec().inputs, op) - self._generic_data_containerA = Input( + self._generic_data_containerA: Input[GenericDataContainer] = Input( identical_generic_data_containers._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._generic_data_containerA) - self._generic_data_containerB = Input( + self._generic_data_containerB: Input[GenericDataContainer] = Input( identical_generic_data_containers._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._generic_data_containerB) - self._double_value = Input( + self._double_value: Input[float] = Input( identical_generic_data_containers._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._double_value) - self._double_tolerance = Input( + self._double_tolerance: Input[float] = Input( identical_generic_data_containers._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._double_tolerance) - self._compare_auxiliary = Input( + self._compare_auxiliary: Input[bool] = Input( identical_generic_data_containers._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._compare_auxiliary) @property - def generic_data_containerA(self) -> Input: + def generic_data_containerA(self) -> Input[GenericDataContainer]: r"""Allows to connect generic_data_containerA input to the operator. Returns @@ -272,7 +276,7 @@ def generic_data_containerA(self) -> Input: return self._generic_data_containerA @property - def generic_data_containerB(self) -> Input: + def generic_data_containerB(self) -> Input[GenericDataContainer]: r"""Allows to connect generic_data_containerB input to the operator. Returns @@ -291,7 +295,7 @@ def generic_data_containerB(self) -> Input: return self._generic_data_containerB @property - def double_value(self) -> Input: + def double_value(self) -> Input[float]: r"""Allows to connect double_value input to the operator. Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14). @@ -312,7 +316,7 @@ def double_value(self) -> Input: return self._double_value @property - def double_tolerance(self) -> Input: + def double_tolerance(self) -> Input[float]: r"""Allows to connect double_tolerance input to the operator. Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1-v2)/v2 < relativeTol. Default is 0.001. @@ -333,7 +337,7 @@ def double_tolerance(self) -> Input: return self._double_tolerance @property - def compare_auxiliary(self) -> Input: + def compare_auxiliary(self) -> Input[bool]: r"""Allows to connect compare_auxiliary input to the operator. For meshes and meshescontainer: compare auxiliary data (i.e property fields, scopings...). Default value is 'false'. 
@@ -369,17 +373,17 @@ class OutputsIdenticalGenericDataContainers(_Outputs): def __init__(self, op: Operator): super().__init__(identical_generic_data_containers._spec().outputs, op) - self._included = Output( + self._included: Output[bool] = Output( identical_generic_data_containers._spec().output_pin(0), 0, op ) self._outputs.append(self._included) - self._message = Output( + self._message: Output[str] = Output( identical_generic_data_containers._spec().output_pin(1), 1, op ) self._outputs.append(self._message) @property - def included(self) -> Output: + def included(self) -> Output[bool]: r"""Allows to get included output of the operator bool (true if belongs...) @@ -399,7 +403,7 @@ def included(self) -> Output: return self._included @property - def message(self) -> Output: + def message(self) -> Output[str]: r"""Allows to get message output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/identical_mc.py b/src/ansys/dpf/core/operators/logic/identical_mc.py index 228ca13a966..f055409732e 100644 --- a/src/ansys/dpf/core/operators/logic/identical_mc.py +++ b/src/ansys/dpf/core/operators/logic/identical_mc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshes_container import MeshesContainer + class identical_mc(Operator): r"""Checks if two meshes_container are identical. @@ -217,19 +221,29 @@ class InputsIdenticalMc(_Inputs): def __init__(self, op: Operator): super().__init__(identical_mc._spec().inputs, op) - self._meshes_containerA = Input(identical_mc._spec().input_pin(0), 0, op, -1) + self._meshes_containerA: Input[MeshesContainer] = Input( + identical_mc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._meshes_containerA) - self._meshes_containerB = Input(identical_mc._spec().input_pin(1), 1, op, -1) + self._meshes_containerB: Input[MeshesContainer] = Input( + identical_mc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._meshes_containerB) - self._small_value = Input(identical_mc._spec().input_pin(2), 2, op, -1) + self._small_value: Input[float] = Input( + identical_mc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._small_value) - self._tolerance = Input(identical_mc._spec().input_pin(3), 3, op, -1) + self._tolerance: Input[float] = Input( + identical_mc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._tolerance) - self._compare_auxiliary = Input(identical_mc._spec().input_pin(4), 4, op, -1) + self._compare_auxiliary: Input[bool] = Input( + identical_mc._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._compare_auxiliary) @property - def meshes_containerA(self) -> Input: + def meshes_containerA(self) -> Input[MeshesContainer]: r"""Allows to connect meshes_containerA input to the operator. Returns @@ -248,7 +262,7 @@ def meshes_containerA(self) -> Input: return self._meshes_containerA @property - def meshes_containerB(self) -> Input: + def meshes_containerB(self) -> Input[MeshesContainer]: r"""Allows to connect meshes_containerB input to the operator. Returns @@ -267,7 +281,7 @@ def meshes_containerB(self) -> Input: return self._meshes_containerB @property - def small_value(self) -> Input: + def small_value(self) -> Input[float]: r"""Allows to connect small_value input to the operator. Double positive small value. 
Smallest value which will be considered during the comparison step. All the abs(values) in the field less than this value are considered as null, (default value:1.0e-14). @@ -288,7 +302,7 @@ def small_value(self) -> Input: return self._small_value @property - def tolerance(self) -> Input: + def tolerance(self) -> Input[float]: r"""Allows to connect tolerance input to the operator. Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001). @@ -309,7 +323,7 @@ def tolerance(self) -> Input: return self._tolerance @property - def compare_auxiliary(self) -> Input: + def compare_auxiliary(self) -> Input[bool]: r"""Allows to connect compare_auxiliary input to the operator. compare auxiliary data (i.e property fields, scopings...). Default value is 'false'. @@ -345,13 +359,13 @@ class OutputsIdenticalMc(_Outputs): def __init__(self, op: Operator): super().__init__(identical_mc._spec().outputs, op) - self._boolean = Output(identical_mc._spec().output_pin(0), 0, op) + self._boolean: Output[bool] = Output(identical_mc._spec().output_pin(0), 0, op) self._outputs.append(self._boolean) - self._message = Output(identical_mc._spec().output_pin(1), 1, op) + self._message: Output[str] = Output(identical_mc._spec().output_pin(1), 1, op) self._outputs.append(self._message) @property - def boolean(self) -> Output: + def boolean(self) -> Output[bool]: r"""Allows to get boolean output of the operator bool (true if identical...) @@ -371,7 +385,7 @@ def boolean(self) -> Output: return self._boolean @property - def message(self) -> Output: + def message(self) -> Output[str]: r"""Allows to get message output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/identical_meshes.py b/src/ansys/dpf/core/operators/logic/identical_meshes.py index 6bdd9c1adb2..cb256c285c4 100644 --- a/src/ansys/dpf/core/operators/logic/identical_meshes.py +++ b/src/ansys/dpf/core/operators/logic/identical_meshes.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + class identical_meshes(Operator): r"""Takes two meshes and compares them. 
Note: When comparing mesh @@ -212,21 +216,29 @@ class InputsIdenticalMeshes(_Inputs): def __init__(self, op: Operator): super().__init__(identical_meshes._spec().inputs, op) - self._meshA = Input(identical_meshes._spec().input_pin(0), 0, op, -1) + self._meshA: Input[MeshedRegion] = Input( + identical_meshes._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._meshA) - self._meshB = Input(identical_meshes._spec().input_pin(1), 1, op, -1) + self._meshB: Input[MeshedRegion] = Input( + identical_meshes._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._meshB) - self._small_value = Input(identical_meshes._spec().input_pin(2), 2, op, -1) + self._small_value: Input[float] = Input( + identical_meshes._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._small_value) - self._tolerance = Input(identical_meshes._spec().input_pin(3), 3, op, -1) + self._tolerance: Input[float] = Input( + identical_meshes._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._tolerance) - self._compare_auxiliary = Input( + self._compare_auxiliary: Input[bool] = Input( identical_meshes._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._compare_auxiliary) @property - def meshA(self) -> Input: + def meshA(self) -> Input[MeshedRegion]: r"""Allows to connect meshA input to the operator. Returns @@ -245,7 +257,7 @@ def meshA(self) -> Input: return self._meshA @property - def meshB(self) -> Input: + def meshB(self) -> Input[MeshedRegion]: r"""Allows to connect meshB input to the operator. Returns @@ -264,7 +276,7 @@ def meshB(self) -> Input: return self._meshB @property - def small_value(self) -> Input: + def small_value(self) -> Input[float]: r"""Allows to connect small_value input to the operator. define what is a small value for numeric comparison (default value:1.0e-14). @@ -285,7 +297,7 @@ def small_value(self) -> Input: return self._small_value @property - def tolerance(self) -> Input: + def tolerance(self) -> Input[float]: r"""Allows to connect tolerance input to the operator. define the relative tolerance ceil for numeric comparison (default is 0.001). @@ -306,7 +318,7 @@ def tolerance(self) -> Input: return self._tolerance @property - def compare_auxiliary(self) -> Input: + def compare_auxiliary(self) -> Input[bool]: r"""Allows to connect compare_auxiliary input to the operator. compare auxiliary data (i.e property fields, scopings...). Default value is 'false'. 
@@ -341,11 +353,13 @@ class OutputsIdenticalMeshes(_Outputs): def __init__(self, op: Operator): super().__init__(identical_meshes._spec().outputs, op) - self._are_identical = Output(identical_meshes._spec().output_pin(0), 0, op) + self._are_identical: Output[bool] = Output( + identical_meshes._spec().output_pin(0), 0, op + ) self._outputs.append(self._are_identical) @property - def are_identical(self) -> Output: + def are_identical(self) -> Output[bool]: r"""Allows to get are_identical output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/identical_pfc.py b/src/ansys/dpf/core/operators/logic/identical_pfc.py index 7ffb127a141..359ea43f2d7 100644 --- a/src/ansys/dpf/core/operators/logic/identical_pfc.py +++ b/src/ansys/dpf/core/operators/logic/identical_pfc.py @@ -173,11 +173,11 @@ class InputsIdenticalPfc(_Inputs): def __init__(self, op: Operator): super().__init__(identical_pfc._spec().inputs, op) - self._property_fields_containerA = Input( + self._property_fields_containerA: Input = Input( identical_pfc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._property_fields_containerA) - self._property_fields_containerB = Input( + self._property_fields_containerB: Input = Input( identical_pfc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._property_fields_containerB) @@ -236,13 +236,13 @@ class OutputsIdenticalPfc(_Outputs): def __init__(self, op: Operator): super().__init__(identical_pfc._spec().outputs, op) - self._boolean = Output(identical_pfc._spec().output_pin(0), 0, op) + self._boolean: Output[bool] = Output(identical_pfc._spec().output_pin(0), 0, op) self._outputs.append(self._boolean) - self._message = Output(identical_pfc._spec().output_pin(1), 1, op) + self._message: Output[str] = Output(identical_pfc._spec().output_pin(1), 1, op) self._outputs.append(self._message) @property - def boolean(self) -> Output: + def boolean(self) -> Output[bool]: r"""Allows to get boolean output of the operator bool (true if identical...) @@ -262,7 +262,7 @@ def boolean(self) -> Output: return self._boolean @property - def message(self) -> Output: + def message(self) -> Output[str]: r"""Allows to get message output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/identical_property_fields.py b/src/ansys/dpf/core/operators/logic/identical_property_fields.py index b0edc313f36..75343079a0d 100644 --- a/src/ansys/dpf/core/operators/logic/identical_property_fields.py +++ b/src/ansys/dpf/core/operators/logic/identical_property_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + class identical_property_fields(Operator): r"""Takes two property fields and compares them. 
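A sketch of `identical_meshes` with its newly typed pins; `mesh_a` and `mesh_b` are hypothetical `MeshedRegion` objects:

    from ansys.dpf import core as dpf

    # `mesh_a` and `mesh_b` are assumed to be existing MeshedRegion objects.
    op = dpf.operators.logic.identical_meshes()
    op.inputs.meshA.connect(mesh_a)            # Input[MeshedRegion]
    op.inputs.meshB.connect(mesh_b)            # Input[MeshedRegion]
    op.inputs.compare_auxiliary.connect(True)  # Input[bool]: also compare property fields, scopings, ...
    meshes_match = op.outputs.are_identical()  # Output[bool]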
@@ -164,17 +168,17 @@ class InputsIdenticalPropertyFields(_Inputs): def __init__(self, op: Operator): super().__init__(identical_property_fields._spec().inputs, op) - self._property_fieldA = Input( + self._property_fieldA: Input[MeshedRegion] = Input( identical_property_fields._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._property_fieldA) - self._property_fieldB = Input( + self._property_fieldB: Input[MeshedRegion] = Input( identical_property_fields._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._property_fieldB) @property - def property_fieldA(self) -> Input: + def property_fieldA(self) -> Input[MeshedRegion]: r"""Allows to connect property_fieldA input to the operator. Returns @@ -193,7 +197,7 @@ def property_fieldA(self) -> Input: return self._property_fieldA @property - def property_fieldB(self) -> Input: + def property_fieldB(self) -> Input[MeshedRegion]: r"""Allows to connect property_fieldB input to the operator. Returns @@ -227,17 +231,17 @@ class OutputsIdenticalPropertyFields(_Outputs): def __init__(self, op: Operator): super().__init__(identical_property_fields._spec().outputs, op) - self._are_identical = Output( + self._are_identical: Output[bool] = Output( identical_property_fields._spec().output_pin(0), 0, op ) self._outputs.append(self._are_identical) - self._information = Output( + self._information: Output[str] = Output( identical_property_fields._spec().output_pin(1), 1, op ) self._outputs.append(self._information) @property - def are_identical(self) -> Output: + def are_identical(self) -> Output[bool]: r"""Allows to get are_identical output of the operator Returns @@ -255,7 +259,7 @@ def are_identical(self) -> Output: return self._are_identical @property - def information(self) -> Output: + def information(self) -> Output[str]: r"""Allows to get information output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/identical_sc.py b/src/ansys/dpf/core/operators/logic/identical_sc.py index 4c482572571..01f622a42e6 100644 --- a/src/ansys/dpf/core/operators/logic/identical_sc.py +++ b/src/ansys/dpf/core/operators/logic/identical_sc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.scopings_container import ScopingsContainer + class identical_sc(Operator): r"""Checks if two scopings_container are identical. @@ -173,13 +177,17 @@ class InputsIdenticalSc(_Inputs): def __init__(self, op: Operator): super().__init__(identical_sc._spec().inputs, op) - self._scopings_containerA = Input(identical_sc._spec().input_pin(0), 0, op, -1) + self._scopings_containerA: Input[ScopingsContainer] = Input( + identical_sc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._scopings_containerA) - self._scopings_containerB = Input(identical_sc._spec().input_pin(1), 1, op, -1) + self._scopings_containerB: Input[ScopingsContainer] = Input( + identical_sc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scopings_containerB) @property - def scopings_containerA(self) -> Input: + def scopings_containerA(self) -> Input[ScopingsContainer]: r"""Allows to connect scopings_containerA input to the operator. 
Returns @@ -198,7 +206,7 @@ def scopings_containerA(self) -> Input: return self._scopings_containerA @property - def scopings_containerB(self) -> Input: + def scopings_containerB(self) -> Input[ScopingsContainer]: r"""Allows to connect scopings_containerB input to the operator. Returns @@ -232,13 +240,13 @@ class OutputsIdenticalSc(_Outputs): def __init__(self, op: Operator): super().__init__(identical_sc._spec().outputs, op) - self._boolean = Output(identical_sc._spec().output_pin(0), 0, op) + self._boolean: Output[bool] = Output(identical_sc._spec().output_pin(0), 0, op) self._outputs.append(self._boolean) - self._message = Output(identical_sc._spec().output_pin(1), 1, op) + self._message: Output[str] = Output(identical_sc._spec().output_pin(1), 1, op) self._outputs.append(self._message) @property - def boolean(self) -> Output: + def boolean(self) -> Output[bool]: r"""Allows to get boolean output of the operator bool (true if identical...) @@ -258,7 +266,7 @@ def boolean(self) -> Output: return self._boolean @property - def message(self) -> Output: + def message(self) -> Output[str]: r"""Allows to get message output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/identical_scopings.py b/src/ansys/dpf/core/operators/logic/identical_scopings.py index 29636ce3440..fe628861e45 100644 --- a/src/ansys/dpf/core/operators/logic/identical_scopings.py +++ b/src/ansys/dpf/core/operators/logic/identical_scopings.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.scoping import Scoping + class identical_scopings(Operator): r"""Check if two scopings are identical. @@ -163,13 +167,17 @@ class InputsIdenticalScopings(_Inputs): def __init__(self, op: Operator): super().__init__(identical_scopings._spec().inputs, op) - self._scopingA = Input(identical_scopings._spec().input_pin(0), 0, op, -1) + self._scopingA: Input[Scoping] = Input( + identical_scopings._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._scopingA) - self._scopingB = Input(identical_scopings._spec().input_pin(1), 1, op, -1) + self._scopingB: Input[Scoping] = Input( + identical_scopings._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scopingB) @property - def scopingA(self) -> Input: + def scopingA(self) -> Input[Scoping]: r"""Allows to connect scopingA input to the operator. Returns @@ -188,7 +196,7 @@ def scopingA(self) -> Input: return self._scopingA @property - def scopingB(self) -> Input: + def scopingB(self) -> Input[Scoping]: r"""Allows to connect scopingB input to the operator. Returns @@ -222,13 +230,17 @@ class OutputsIdenticalScopings(_Outputs): def __init__(self, op: Operator): super().__init__(identical_scopings._spec().outputs, op) - self._boolean = Output(identical_scopings._spec().output_pin(0), 0, op) + self._boolean: Output[bool] = Output( + identical_scopings._spec().output_pin(0), 0, op + ) self._outputs.append(self._boolean) - self._message = Output(identical_scopings._spec().output_pin(1), 1, op) + self._message: Output[str] = Output( + identical_scopings._spec().output_pin(1), 1, op + ) self._outputs.append(self._message) @property - def boolean(self) -> Output: + def boolean(self) -> Output[bool]: r"""Allows to get boolean output of the operator bool (true if identical...) 
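A sketch for the scoping comparison above, building the two scopings in place so the example is self-contained; the ids and location are arbitrary:

    from ansys.dpf import core as dpf

    scoping_a = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)
    scoping_b = dpf.Scoping(ids=[1, 2, 4], location=dpf.locations.nodal)

    op = dpf.operators.logic.identical_scopings()
    op.inputs.scopingA.connect(scoping_a)   # Input[Scoping]
    op.inputs.scopingB.connect(scoping_b)   # Input[Scoping]
    same = op.outputs.boolean()             # Output[bool], False for these ids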
@@ -248,7 +260,7 @@ def boolean(self) -> Output: return self._boolean @property - def message(self) -> Output: + def message(self) -> Output[str]: r"""Allows to get message output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/identical_string_fields.py b/src/ansys/dpf/core/operators/logic/identical_string_fields.py index 7ea55d7a1a2..5dd4cf0878e 100644 --- a/src/ansys/dpf/core/operators/logic/identical_string_fields.py +++ b/src/ansys/dpf/core/operators/logic/identical_string_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.string_field import StringField + class identical_string_fields(Operator): r"""Takes two string fields and compares them. @@ -164,17 +168,17 @@ class InputsIdenticalStringFields(_Inputs): def __init__(self, op: Operator): super().__init__(identical_string_fields._spec().inputs, op) - self._string_fieldA = Input( + self._string_fieldA: Input[StringField] = Input( identical_string_fields._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._string_fieldA) - self._string_fieldB = Input( + self._string_fieldB: Input[StringField] = Input( identical_string_fields._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._string_fieldB) @property - def string_fieldA(self) -> Input: + def string_fieldA(self) -> Input[StringField]: r"""Allows to connect string_fieldA input to the operator. Returns @@ -193,7 +197,7 @@ def string_fieldA(self) -> Input: return self._string_fieldA @property - def string_fieldB(self) -> Input: + def string_fieldB(self) -> Input[StringField]: r"""Allows to connect string_fieldB input to the operator. Returns @@ -227,15 +231,17 @@ class OutputsIdenticalStringFields(_Outputs): def __init__(self, op: Operator): super().__init__(identical_string_fields._spec().outputs, op) - self._are_identical = Output( + self._are_identical: Output[bool] = Output( identical_string_fields._spec().output_pin(0), 0, op ) self._outputs.append(self._are_identical) - self._information = Output(identical_string_fields._spec().output_pin(1), 1, op) + self._information: Output[str] = Output( + identical_string_fields._spec().output_pin(1), 1, op + ) self._outputs.append(self._information) @property - def are_identical(self) -> Output: + def are_identical(self) -> Output[bool]: r"""Allows to get are_identical output of the operator Returns @@ -253,7 +259,7 @@ def are_identical(self) -> Output: return self._are_identical @property - def information(self) -> Output: + def information(self) -> Output[str]: r"""Allows to get information output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/included_fields.py b/src/ansys/dpf/core/operators/logic/included_fields.py index c028cac9b33..0f7c252e60f 100644 --- a/src/ansys/dpf/core/operators/logic/included_fields.py +++ b/src/ansys/dpf/core/operators/logic/included_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class included_fields(Operator): r"""Checks if one field belongs to another. 
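The if TYPE_CHECKING blocks this change adds to every module are annotation-only imports; combined with from __future__ import annotations they cost nothing at runtime and cannot introduce import cycles. A minimal standalone sketch of the pattern (the function here is illustrative, not from the diff):

    from __future__ import annotations   # annotations stay unevaluated strings

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:                     # only seen by the type checker, never at runtime
        from ansys.dpf.core.field import Field

    def entity_count(field: Field) -> int:
        """Return the number of entities in the field's scoping."""
        return len(field.scoping.ids)     # Field resolves for checkers, unused at runtime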
@@ -201,17 +205,25 @@ class InputsIncludedFields(_Inputs): def __init__(self, op: Operator): super().__init__(included_fields._spec().inputs, op) - self._fieldA = Input(included_fields._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field] = Input( + included_fields._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(included_fields._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field] = Input( + included_fields._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) - self._double_value = Input(included_fields._spec().input_pin(2), 2, op, -1) + self._double_value: Input[float] = Input( + included_fields._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._double_value) - self._double_tolerance = Input(included_fields._spec().input_pin(3), 3, op, -1) + self._double_tolerance: Input[float] = Input( + included_fields._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._double_tolerance) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field]: r"""Allows to connect fieldA input to the operator. Returns @@ -230,7 +242,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field]: r"""Allows to connect fieldB input to the operator. Returns @@ -249,7 +261,7 @@ def fieldB(self) -> Input: return self._fieldB @property - def double_value(self) -> Input: + def double_value(self) -> Input[float]: r"""Allows to connect double_value input to the operator. Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14). @@ -270,7 +282,7 @@ def double_value(self) -> Input: return self._double_value @property - def double_tolerance(self) -> Input: + def double_tolerance(self) -> Input[float]: r"""Allows to connect double_tolerance input to the operator. Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1-v2)/v2 < relativeTol. Default is 0.001. @@ -306,13 +318,17 @@ class OutputsIncludedFields(_Outputs): def __init__(self, op: Operator): super().__init__(included_fields._spec().outputs, op) - self._included = Output(included_fields._spec().output_pin(0), 0, op) + self._included: Output[bool] = Output( + included_fields._spec().output_pin(0), 0, op + ) self._outputs.append(self._included) - self._message = Output(included_fields._spec().output_pin(1), 1, op) + self._message: Output[str] = Output( + included_fields._spec().output_pin(1), 1, op + ) self._outputs.append(self._message) @property - def included(self) -> Output: + def included(self) -> Output[bool]: r"""Allows to get included output of the operator bool (true if belongs...) 
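Sketch for the comparison pins above: both tolerances are plain Python floats, which is exactly what Input[float] now advertises; field_a and field_b are placeholder dpf.Field objects:

    from ansys.dpf import core as dpf

    op = dpf.operators.logic.included_fields()
    op.inputs.fieldA.connect(field_a)          # placeholder Field
    op.inputs.fieldB.connect(field_b)          # placeholder Field
    op.inputs.double_value.connect(1.0e-14)    # Input[float]: values below this count as zero
    op.inputs.double_tolerance.connect(0.001)  # Input[float]: relative tolerance (v1-v2)/v2
    belongs = op.outputs.included()            # Output[bool]
    why = op.outputs.message()                 # Output[str]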
@@ -332,7 +348,7 @@ def included(self) -> Output: return self._included @property - def message(self) -> Output: + def message(self) -> Output[str]: r"""Allows to get message output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/solid_shell_fields.py b/src/ansys/dpf/core/operators/logic/solid_shell_fields.py index 2a8f0b419ae..a214b70184b 100644 --- a/src/ansys/dpf/core/operators/logic/solid_shell_fields.py +++ b/src/ansys/dpf/core/operators/logic/solid_shell_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class solid_shell_fields(Operator): r"""Merges shell and solid fields for each time step/frequency in the fields @@ -142,13 +146,13 @@ class InputsSolidShellFields(_Inputs): def __init__(self, op: Operator): super().__init__(solid_shell_fields._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( solid_shell_fields._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -181,11 +185,13 @@ class OutputsSolidShellFields(_Outputs): def __init__(self, op: Operator): super().__init__(solid_shell_fields._spec().outputs, op) - self._fields_container = Output(solid_shell_fields._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + solid_shell_fields._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/logic/split_data_sources.py b/src/ansys/dpf/core/operators/logic/split_data_sources.py index 8f7c903cde7..6df807c7064 100644 --- a/src/ansys/dpf/core/operators/logic/split_data_sources.py +++ b/src/ansys/dpf/core/operators/logic/split_data_sources.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + class split_data_sources(Operator): r"""Splits a Data Sources into multiple coherent data sources, actual number @@ -179,13 +183,17 @@ class InputsSplitDataSources(_Inputs): def __init__(self, op: Operator): super().__init__(split_data_sources._spec().inputs, op) - self._data_sources = Input(split_data_sources._spec().input_pin(0), 0, op, -1) + self._data_sources: Input[DataSources] = Input( + split_data_sources._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._data_sources) - self._output_count = Input(split_data_sources._spec().input_pin(1), 1, op, -1) + self._output_count: Input[int] = Input( + split_data_sources._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._output_count) @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the 
operator. Data sources to split. @@ -206,7 +214,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def output_count(self) -> Input: + def output_count(self) -> Input[int]: r"""Allows to connect output_count input to the operator. Number of desired outputs. @@ -243,15 +251,21 @@ class OutputsSplitDataSources(_Outputs): def __init__(self, op: Operator): super().__init__(split_data_sources._spec().outputs, op) - self._output_count = Output(split_data_sources._spec().output_pin(-1), -1, op) + self._output_count: Output[int] = Output( + split_data_sources._spec().output_pin(-1), -1, op + ) self._outputs.append(self._output_count) - self._outputs1 = Output(split_data_sources._spec().output_pin(0), 0, op) + self._outputs1: Output[DataSources] = Output( + split_data_sources._spec().output_pin(0), 0, op + ) self._outputs.append(self._outputs1) - self._outputs2 = Output(split_data_sources._spec().output_pin(1), 1, op) + self._outputs2: Output[DataSources] = Output( + split_data_sources._spec().output_pin(1), 1, op + ) self._outputs.append(self._outputs2) @property - def output_count(self) -> Output: + def output_count(self) -> Output[int]: r"""Allows to get output_count output of the operator Actual number of outputs. @@ -271,7 +285,7 @@ def output_count(self) -> Output: return self._output_count @property - def outputs1(self) -> Output: + def outputs1(self) -> Output[DataSources]: r"""Allows to get outputs1 output of the operator Data sources outputs. @@ -291,7 +305,7 @@ def outputs1(self) -> Output: return self._outputs1 @property - def outputs2(self) -> Output: + def outputs2(self) -> Output[DataSources]: r"""Allows to get outputs2 output of the operator Data sources outputs. diff --git a/src/ansys/dpf/core/operators/logic/split_streams.py b/src/ansys/dpf/core/operators/logic/split_streams.py index 63ec8cd8683..39183d5e3fa 100644 --- a/src/ansys/dpf/core/operators/logic/split_streams.py +++ b/src/ansys/dpf/core/operators/logic/split_streams.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.streams_container import StreamsContainer + class split_streams(Operator): r"""Splits a Streams into multiple coherent streams, actual number of @@ -177,13 +181,17 @@ class InputsSplitStreams(_Inputs): def __init__(self, op: Operator): super().__init__(split_streams._spec().inputs, op) - self._streams = Input(split_streams._spec().input_pin(0), 0, op, -1) + self._streams: Input[StreamsContainer] = Input( + split_streams._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._streams) - self._output_count = Input(split_streams._spec().input_pin(1), 1, op, -1) + self._output_count: Input[int] = Input( + split_streams._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._output_count) @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. Streams to split. @@ -204,7 +212,7 @@ def streams(self) -> Input: return self._streams @property - def output_count(self) -> Input: + def output_count(self) -> Input[int]: r"""Allows to connect output_count input to the operator. Number of desired outputs. 
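Sketch for split_data_sources with the typed pins above; the result path is a placeholder. Note that output_count exists both as an Input[int] (requested number of outputs) and as the pin -1 Output[int] (number actually produced):

    from ansys.dpf import core as dpf

    ds = dpf.DataSources(r"path/to/results.rst")   # placeholder path
    op = dpf.operators.logic.split_data_sources(data_sources=ds, output_count=2)

    produced = op.outputs.output_count()   # Output[int], actual number of outputs
    part_1 = op.outputs.outputs1()         # Output[DataSources]
    part_2 = op.outputs.outputs2()         # Output[DataSources]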
@@ -241,15 +249,21 @@ class OutputsSplitStreams(_Outputs): def __init__(self, op: Operator): super().__init__(split_streams._spec().outputs, op) - self._output_count = Output(split_streams._spec().output_pin(-1), -1, op) + self._output_count: Output[int] = Output( + split_streams._spec().output_pin(-1), -1, op + ) self._outputs.append(self._output_count) - self._outputs1 = Output(split_streams._spec().output_pin(0), 0, op) + self._outputs1: Output[StreamsContainer] = Output( + split_streams._spec().output_pin(0), 0, op + ) self._outputs.append(self._outputs1) - self._outputs2 = Output(split_streams._spec().output_pin(1), 1, op) + self._outputs2: Output[StreamsContainer] = Output( + split_streams._spec().output_pin(1), 1, op + ) self._outputs.append(self._outputs2) @property - def output_count(self) -> Output: + def output_count(self) -> Output[int]: r"""Allows to get output_count output of the operator Actual number of outputs. @@ -269,7 +283,7 @@ def output_count(self) -> Output: return self._output_count @property - def outputs1(self) -> Output: + def outputs1(self) -> Output[StreamsContainer]: r"""Allows to get outputs1 output of the operator Streams outputs. @@ -289,7 +303,7 @@ def outputs1(self) -> Output: return self._outputs1 @property - def outputs2(self) -> Output: + def outputs2(self) -> Output[StreamsContainer]: r"""Allows to get outputs2 output of the operator Streams outputs. diff --git a/src/ansys/dpf/core/operators/mapping/fft.py b/src/ansys/dpf/core/operators/mapping/fft.py index f0fba574b4c..494d3c977b6 100644 --- a/src/ansys/dpf/core/operators/mapping/fft.py +++ b/src/ansys/dpf/core/operators/mapping/fft.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class fft(Operator): r"""Computes the Fast Fourier Transform on each component of input Field or @@ -243,21 +248,29 @@ class InputsFft(_Inputs): def __init__(self, op: Operator): super().__init__(fft._spec().inputs, op) - self._field = Input(fft._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + fft._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._scale_forward_transform = Input(fft._spec().input_pin(3), 3, op, -1) + self._scale_forward_transform: Input[float] = Input( + fft._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._scale_forward_transform) - self._inplace = Input(fft._spec().input_pin(4), 4, op, -1) + self._inplace: Input[bool] = Input(fft._spec().input_pin(4), 4, op, -1) self._inputs.append(self._inplace) - self._force_fft_points = Input(fft._spec().input_pin(5), 5, op, -1) + self._force_fft_points: Input[int] = Input(fft._spec().input_pin(5), 5, op, -1) self._inputs.append(self._force_fft_points) - self._cutoff_frequency = Input(fft._spec().input_pin(6), 6, op, -1) + self._cutoff_frequency: Input[float] = Input( + fft._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._cutoff_frequency) - self._scale_right_amplitude = Input(fft._spec().input_pin(7), 7, op, -1) + self._scale_right_amplitude: Input[bool] = Input( + fft._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._scale_right_amplitude) @property - def field(self) -> Input: + def field(self) -> Input[Field | 
FieldsContainer]: r"""Allows to connect field input to the operator. Field or Fields Container. @@ -278,7 +291,7 @@ def field(self) -> Input: return self._field @property - def scale_forward_transform(self) -> Input: + def scale_forward_transform(self) -> Input[float]: r"""Allows to connect scale_forward_transform input to the operator. Scale for Forward Transform, default is 2/field_num_elementary_data. @@ -299,7 +312,7 @@ def scale_forward_transform(self) -> Input: return self._scale_forward_transform @property - def inplace(self) -> Input: + def inplace(self) -> Input[bool]: r"""Allows to connect inplace input to the operator. True if inplace, default is false. @@ -320,7 +333,7 @@ def inplace(self) -> Input: return self._inplace @property - def force_fft_points(self) -> Input: + def force_fft_points(self) -> Input[int]: r"""Allows to connect force_fft_points input to the operator. Explicitely define number of fft points to either rescope or perform zero padding. @@ -341,7 +354,7 @@ def force_fft_points(self) -> Input: return self._force_fft_points @property - def cutoff_frequency(self) -> Input: + def cutoff_frequency(self) -> Input[float]: r"""Allows to connect cutoff_frequency input to the operator. Restrict output frequency up to this cutoff frequency @@ -362,7 +375,7 @@ def cutoff_frequency(self) -> Input: return self._cutoff_frequency @property - def scale_right_amplitude(self) -> Input: + def scale_right_amplitude(self) -> Input[bool]: r"""Allows to connect scale_right_amplitude input to the operator. If set to true (default is false), 2/field_num_entities scaling will be applied, to have right amplitude values. @@ -397,11 +410,13 @@ class OutputsFft(_Outputs): def __init__(self, op: Operator): super().__init__(fft._spec().outputs, op) - self._fields_container = Output(fft._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + fft._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Output Complex Fields Container with labels matching input Fields Container. No supports binded, but prepare_sampling_fft provides it. 
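Sketch for the FFT pins above, showing the mix of scalar types the annotations now make explicit; signal_fc is a placeholder FieldsContainer holding the time-domain signal:

    from ansys.dpf import core as dpf

    op = dpf.operators.mapping.fft()
    op.inputs.field.connect(signal_fc)                 # Input[Field | FieldsContainer]
    op.inputs.scale_forward_transform.connect(2.0e-3)  # Input[float]
    op.inputs.inplace.connect(False)                   # Input[bool]
    op.inputs.force_fft_points.connect(1024)           # Input[int], rescope or zero padding
    op.inputs.cutoff_frequency.connect(500.0)          # Input[float]
    op.inputs.scale_right_amplitude.connect(True)      # Input[bool]
    spectrum = op.outputs.fields_container()           # Output[FieldsContainer]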
diff --git a/src/ansys/dpf/core/operators/mapping/find_reduced_coordinates.py b/src/ansys/dpf/core/operators/mapping/find_reduced_coordinates.py index 6ef55ead003..2ff8081dd61 100644 --- a/src/ansys/dpf/core/operators/mapping/find_reduced_coordinates.py +++ b/src/ansys/dpf/core/operators/mapping/find_reduced_coordinates.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scopings_container import ScopingsContainer + class find_reduced_coordinates(Operator): r"""Finds the elements corresponding to the given coordinates in input and @@ -194,19 +202,23 @@ class InputsFindReducedCoordinates(_Inputs): def __init__(self, op: Operator): super().__init__(find_reduced_coordinates._spec().inputs, op) - self._coordinates = Input( - find_reduced_coordinates._spec().input_pin(1), 1, op, -1 - ) + self._coordinates: Input[ + Field | FieldsContainer | MeshedRegion | MeshesContainer + ] = Input(find_reduced_coordinates._spec().input_pin(1), 1, op, -1) self._inputs.append(self._coordinates) - self._mesh = Input(find_reduced_coordinates._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + find_reduced_coordinates._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._use_quadratic_elements = Input( + self._use_quadratic_elements: Input[bool] = Input( find_reduced_coordinates._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._use_quadratic_elements) @property - def coordinates(self) -> Input: + def coordinates( + self, + ) -> Input[Field | FieldsContainer | MeshedRegion | MeshesContainer]: r"""Allows to connect coordinates input to the operator. Returns @@ -225,7 +237,7 @@ def coordinates(self) -> Input: return self._coordinates @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. If the first field in input has no mesh in support, then the mesh in this pin is expected (default is false). If a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container. @@ -246,7 +258,7 @@ def mesh(self) -> Input: return self._mesh @property - def use_quadratic_elements(self) -> Input: + def use_quadratic_elements(self) -> Input[bool]: r"""Allows to connect use_quadratic_elements input to the operator. If this pin is set to true, reduced coordinates are computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false. 
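The Field | FieldsContainer | MeshedRegion | MeshesContainer unions above use PEP 604 syntax; because every module already does from __future__ import annotations, these annotations are never evaluated at runtime, which is also why the TYPE_CHECKING-only imports are sufficient. Sketch of the two most common payloads for the coordinates pin (coords_field, coords_fc and mesh are placeholders):

    from ansys.dpf import core as dpf

    op = dpf.operators.mapping.find_reduced_coordinates()
    op.inputs.coordinates.connect(coords_field)   # a single Field of points is accepted...
    # op.inputs.coordinates.connect(coords_fc)    # ...and so is a FieldsContainer of them
    op.inputs.mesh.connect(mesh)                  # MeshedRegion (or MeshesContainer)
    reduced = op.outputs.reduced_coordinates()    # Output[FieldsContainer]
    element_ids = op.outputs.element_ids()        # Output[ScopingsContainer]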
@@ -282,17 +294,17 @@ class OutputsFindReducedCoordinates(_Outputs): def __init__(self, op: Operator): super().__init__(find_reduced_coordinates._spec().outputs, op) - self._reduced_coordinates = Output( + self._reduced_coordinates: Output[FieldsContainer] = Output( find_reduced_coordinates._spec().output_pin(0), 0, op ) self._outputs.append(self._reduced_coordinates) - self._element_ids = Output( + self._element_ids: Output[ScopingsContainer] = Output( find_reduced_coordinates._spec().output_pin(1), 1, op ) self._outputs.append(self._element_ids) @property - def reduced_coordinates(self) -> Output: + def reduced_coordinates(self) -> Output[FieldsContainer]: r"""Allows to get reduced_coordinates output of the operator coordinates in the reference elements @@ -312,7 +324,7 @@ def reduced_coordinates(self) -> Output: return self._reduced_coordinates @property - def element_ids(self) -> Output: + def element_ids(self) -> Output[ScopingsContainer]: r"""Allows to get element_ids output of the operator Ids of the elements where each set of reduced coordinates is found diff --git a/src/ansys/dpf/core/operators/mapping/on_coordinates.py b/src/ansys/dpf/core/operators/mapping/on_coordinates.py index b1ad6619ffb..0fc05257abe 100644 --- a/src/ansys/dpf/core/operators/mapping/on_coordinates.py +++ b/src/ansys/dpf/core/operators/mapping/on_coordinates.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + class on_coordinates(Operator): r"""Evaluates a result on specified coordinates (interpolates results inside @@ -247,25 +254,37 @@ class InputsOnCoordinates(_Inputs): def __init__(self, op: Operator): super().__init__(on_coordinates._spec().inputs, op) - self._fields_container = Input(on_coordinates._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + on_coordinates._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._coordinates = Input(on_coordinates._spec().input_pin(1), 1, op, -1) + self._coordinates: Input[ + Field | FieldsContainer | MeshedRegion | MeshesContainer + ] = Input(on_coordinates._spec().input_pin(1), 1, op, -1) self._inputs.append(self._coordinates) - self._create_support = Input(on_coordinates._spec().input_pin(2), 2, op, -1) + self._create_support: Input[bool] = Input( + on_coordinates._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._create_support) - self._mapping_on_scoping = Input(on_coordinates._spec().input_pin(3), 3, op, -1) + self._mapping_on_scoping: Input[bool] = Input( + on_coordinates._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._mapping_on_scoping) - self._tolerance = Input(on_coordinates._spec().input_pin(5), 5, op, -1) + self._tolerance: Input[float] = Input( + on_coordinates._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._tolerance) - self._mesh = Input(on_coordinates._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + on_coordinates._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._use_quadratic_elements = Input( + 
self._use_quadratic_elements: Input[bool] = Input( on_coordinates._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._use_quadratic_elements) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -284,7 +303,9 @@ def fields_container(self) -> Input: return self._fields_container @property - def coordinates(self) -> Input: + def coordinates( + self, + ) -> Input[Field | FieldsContainer | MeshedRegion | MeshesContainer]: r"""Allows to connect coordinates input to the operator. Returns @@ -303,7 +324,7 @@ def coordinates(self) -> Input: return self._coordinates @property - def create_support(self) -> Input: + def create_support(self) -> Input[bool]: r"""Allows to connect create_support input to the operator. if this pin is set to true, then, a support associated to the fields consisting of points is created @@ -324,7 +345,7 @@ def create_support(self) -> Input: return self._create_support @property - def mapping_on_scoping(self) -> Input: + def mapping_on_scoping(self) -> Input[bool]: r"""Allows to connect mapping_on_scoping input to the operator. if this pin is set to true, then the mapping between the coordinates and the fields is created only on the first field scoping @@ -345,7 +366,7 @@ def mapping_on_scoping(self) -> Input: return self._mapping_on_scoping @property - def tolerance(self) -> Input: + def tolerance(self) -> Input[float]: r"""Allows to connect tolerance input to the operator. Tolerance used in the iterative algorithm to locate coordinates inside the mesh. Default value: 5e-5. @@ -366,7 +387,7 @@ def tolerance(self) -> Input: return self._tolerance @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. if the first field in input has no mesh in support, then the mesh in this pin is expected (default is false), if a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container @@ -387,7 +408,7 @@ def mesh(self) -> Input: return self._mesh @property - def use_quadratic_elements(self) -> Input: + def use_quadratic_elements(self) -> Input[bool]: r"""Allows to connect use_quadratic_elements input to the operator. If this pin is set to true, the element search for each coordinate is computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false. 
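Interpolation sketch for the pins above; stress_fc, coords and mesh are placeholders (a FieldsContainer of results, a 3D-vector Field of target points, and the supporting MeshedRegion):

    from ansys.dpf import core as dpf

    op = dpf.operators.mapping.on_coordinates()
    op.inputs.fields_container.connect(stress_fc)   # Input[FieldsContainer]
    op.inputs.coordinates.connect(coords)           # Field of target points
    op.inputs.create_support.connect(True)          # Input[bool]: attach a point support
    op.inputs.tolerance.connect(5e-5)               # Input[float], location tolerance
    op.inputs.mesh.connect(mesh)                    # needed when the fields carry no mesh
    interpolated = op.outputs.fields_container()    # Output[FieldsContainer]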
@@ -422,11 +443,13 @@ class OutputsOnCoordinates(_Outputs): def __init__(self, op: Operator): super().__init__(on_coordinates._spec().outputs, op) - self._fields_container = Output(on_coordinates._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + on_coordinates._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mapping/on_reduced_coordinates.py b/src/ansys/dpf/core/operators/mapping/on_reduced_coordinates.py index a4e175c6507..110c6dd56b3 100644 --- a/src/ansys/dpf/core/operators/mapping/on_reduced_coordinates.py +++ b/src/ansys/dpf/core/operators/mapping/on_reduced_coordinates.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scopings_container import ScopingsContainer + class on_reduced_coordinates(Operator): r"""Evaluates a result on specified reduced coordinates of given elements @@ -211,27 +219,29 @@ class InputsOnReducedCoordinates(_Inputs): def __init__(self, op: Operator): super().__init__(on_reduced_coordinates._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( on_reduced_coordinates._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._reduced_coordinates = Input( + self._reduced_coordinates: Input[Field | FieldsContainer] = Input( on_reduced_coordinates._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._reduced_coordinates) - self._element_ids = Input( + self._element_ids: Input[ScopingsContainer] = Input( on_reduced_coordinates._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._element_ids) - self._mesh = Input(on_reduced_coordinates._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + on_reduced_coordinates._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._use_quadratic_elements = Input( + self._use_quadratic_elements: Input[bool] = Input( on_reduced_coordinates._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._use_quadratic_elements) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -250,7 +260,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def reduced_coordinates(self) -> Input: + def reduced_coordinates(self) -> Input[Field | FieldsContainer]: r"""Allows to connect reduced_coordinates input to the operator. coordinates in the reference elements to find (found with the operator "find_reduced_coordinates") @@ -271,7 +281,7 @@ def reduced_coordinates(self) -> Input: return self._reduced_coordinates @property - def element_ids(self) -> Input: + def element_ids(self) -> Input[ScopingsContainer]: r"""Allows to connect element_ids input to the operator. 
Ids of the elements where each set of reduced coordinates is found (found with the operator "find_reduced_coordinates") @@ -292,7 +302,7 @@ def element_ids(self) -> Input: return self._element_ids @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. if the first field in input has no mesh in support, then the mesh in this pin is expected (default is false), if a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container @@ -313,7 +323,7 @@ def mesh(self) -> Input: return self._mesh @property - def use_quadratic_elements(self) -> Input: + def use_quadratic_elements(self) -> Input[bool]: r"""Allows to connect use_quadratic_elements input to the operator. If this pin is set to true, the interpolation is computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false. To use only when results have mid side nodes values. @@ -348,13 +358,13 @@ class OutputsOnReducedCoordinates(_Outputs): def __init__(self, op: Operator): super().__init__(on_reduced_coordinates._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( on_reduced_coordinates._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mapping/prep_sampling_fft.py b/src/ansys/dpf/core/operators/mapping/prep_sampling_fft.py index c0bdca49eaf..01fcea7adfb 100644 --- a/src/ansys/dpf/core/operators/mapping/prep_sampling_fft.py +++ b/src/ansys/dpf/core/operators/mapping/prep_sampling_fft.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class prep_sampling_fft(Operator): r"""Prepare time sampling optimum for FFT computation and expected @@ -190,21 +194,21 @@ class InputsPrepSamplingFft(_Inputs): def __init__(self, op: Operator): super().__init__(prep_sampling_fft._spec().inputs, op) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( prep_sampling_fft._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_support) - self._cutoff_frequency = Input( + self._cutoff_frequency: Input[float] = Input( prep_sampling_fft._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._cutoff_frequency) - self._number_sampling_point = Input( + self._number_sampling_point: Input[int] = Input( prep_sampling_fft._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._number_sampling_point) @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Initial time domain TimeFreqSupport. @@ -225,7 +229,7 @@ def time_freq_support(self) -> Input: return self._time_freq_support @property - def cutoff_frequency(self) -> Input: + def cutoff_frequency(self) -> Input[float]: r"""Allows to connect cutoff_frequency input to the operator. Cutoff Frequency. 
In this case, number of points is calculated computing (time_range * cutoff_freq * 2) and taking the next power of 2 (optimum for fft calculation). @@ -246,7 +250,7 @@ def cutoff_frequency(self) -> Input: return self._cutoff_frequency @property - def number_sampling_point(self) -> Input: + def number_sampling_point(self) -> Input[int]: r"""Allows to connect number_sampling_point input to the operator. For number of sampling point (calculation with cutoff_frequency is ignored). @@ -282,13 +286,17 @@ class OutputsPrepSamplingFft(_Outputs): def __init__(self, op: Operator): super().__init__(prep_sampling_fft._spec().outputs, op) - self._time_tfs_sampled = Output(prep_sampling_fft._spec().output_pin(0), 0, op) + self._time_tfs_sampled: Output[TimeFreqSupport] = Output( + prep_sampling_fft._spec().output_pin(0), 0, op + ) self._outputs.append(self._time_tfs_sampled) - self._freq_tfs_fft = Output(prep_sampling_fft._spec().output_pin(1), 1, op) + self._freq_tfs_fft: Output[TimeFreqSupport] = Output( + prep_sampling_fft._spec().output_pin(1), 1, op + ) self._outputs.append(self._freq_tfs_fft) @property - def time_tfs_sampled(self) -> Output: + def time_tfs_sampled(self) -> Output[TimeFreqSupport]: r"""Allows to get time_tfs_sampled output of the operator Optimum sampled time domain TimeFreqSupport. @@ -308,7 +316,7 @@ def time_tfs_sampled(self) -> Output: return self._time_tfs_sampled @property - def freq_tfs_fft(self) -> Output: + def freq_tfs_fft(self) -> Output[TimeFreqSupport]: r"""Allows to get freq_tfs_fft output of the operator Frequency domain TimeFreqSupport expected in output of FFT. diff --git a/src/ansys/dpf/core/operators/mapping/prepare_mapping_workflow.py b/src/ansys/dpf/core/operators/mapping/prepare_mapping_workflow.py index 11d6627a9d6..3a29ba8da41 100644 --- a/src/ansys/dpf/core/operators/mapping/prepare_mapping_workflow.py +++ b/src/ansys/dpf/core/operators/mapping/prepare_mapping_workflow.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.workflow import Workflow + class prepare_mapping_workflow(Operator): r"""Generates a workflow that can map results from a support to another one. 
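Sketch for prep_sampling_fft, whose two outputs are both typed Output[TimeFreqSupport]; the result file path is a placeholder:

    from ansys.dpf import core as dpf

    model = dpf.Model(r"path/to/transient.rst")    # placeholder path
    prep = dpf.operators.mapping.prep_sampling_fft()
    prep.inputs.time_freq_support.connect(model.metadata.time_freq_support)  # Input[TimeFreqSupport]
    prep.inputs.cutoff_frequency.connect(1000.0)   # Input[float]
    time_support = prep.outputs.time_tfs_sampled() # Output[TimeFreqSupport]
    freq_support = prep.outputs.freq_tfs_fft()     # Output[TimeFreqSupport]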
@@ -191,25 +197,25 @@ class InputsPrepareMappingWorkflow(_Inputs): def __init__(self, op: Operator): super().__init__(prepare_mapping_workflow._spec().inputs, op) - self._input_support = Input( + self._input_support: Input[Field | MeshedRegion] = Input( prepare_mapping_workflow._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._input_support) - self._output_support = Input( + self._output_support: Input[Field | MeshedRegion] = Input( prepare_mapping_workflow._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._output_support) - self._filter_radius = Input( + self._filter_radius: Input[float] = Input( prepare_mapping_workflow._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._filter_radius) - self._influence_box = Input( + self._influence_box: Input[float] = Input( prepare_mapping_workflow._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._influence_box) @property - def input_support(self) -> Input: + def input_support(self) -> Input[Field | MeshedRegion]: r"""Allows to connect input_support input to the operator. Returns @@ -228,7 +234,7 @@ def input_support(self) -> Input: return self._input_support @property - def output_support(self) -> Input: + def output_support(self) -> Input[Field | MeshedRegion]: r"""Allows to connect output_support input to the operator. Returns @@ -247,7 +253,7 @@ def output_support(self) -> Input: return self._output_support @property - def filter_radius(self) -> Input: + def filter_radius(self) -> Input[float]: r"""Allows to connect filter_radius input to the operator. Radius size for the RBF filter @@ -268,7 +274,7 @@ def filter_radius(self) -> Input: return self._filter_radius @property - def influence_box(self) -> Input: + def influence_box(self) -> Input[float]: r"""Allows to connect influence_box input to the operator. Returns @@ -301,13 +307,13 @@ class OutputsPrepareMappingWorkflow(_Outputs): def __init__(self, op: Operator): super().__init__(prepare_mapping_workflow._spec().outputs, op) - self._mapping_workflow = Output( + self._mapping_workflow: Output[Workflow] = Output( prepare_mapping_workflow._spec().output_pin(0), 0, op ) self._outputs.append(self._mapping_workflow) @property - def mapping_workflow(self) -> Output: + def mapping_workflow(self) -> Output[Workflow]: r"""Allows to get mapping_workflow output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py b/src/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py index e0799b4c1df..8ea7627067f 100644 --- a/src/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py +++ b/src/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class scoping_on_coordinates(Operator): r"""Finds the Elemental scoping of a set of coordinates. 
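Sketch for prepare_mapping_workflow, whose mapping_workflow pin is now typed Output[Workflow]; source_field and target_mesh are placeholders, and the pin names exposed by the generated workflow are not assumed here, only inspected:

    from ansys.dpf import core as dpf

    op = dpf.operators.mapping.prepare_mapping_workflow()
    op.inputs.input_support.connect(source_field)   # Input[Field | MeshedRegion]
    op.inputs.output_support.connect(target_mesh)   # Input[Field | MeshedRegion]
    op.inputs.filter_radius.connect(0.01)           # Input[float], RBF filter radius
    wf = op.outputs.mapping_workflow()              # Output[Workflow]
    print(wf.input_names, wf.output_names)          # discover the workflow's own pins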
@@ -154,15 +160,17 @@ class InputsScopingOnCoordinates(_Inputs): def __init__(self, op: Operator): super().__init__(scoping_on_coordinates._spec().inputs, op) - self._coordinates = Input( + self._coordinates: Input[Field] = Input( scoping_on_coordinates._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._coordinates) - self._mesh = Input(scoping_on_coordinates._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + scoping_on_coordinates._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def coordinates(self) -> Input: + def coordinates(self) -> Input[Field]: r"""Allows to connect coordinates input to the operator. Returns @@ -181,7 +189,7 @@ def coordinates(self) -> Input: return self._coordinates @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -214,11 +222,13 @@ class OutputsScopingOnCoordinates(_Outputs): def __init__(self, op: Operator): super().__init__(scoping_on_coordinates._spec().outputs, op) - self._scoping = Output(scoping_on_coordinates._spec().output_pin(0), 0, op) + self._scoping: Output[Scoping] = Output( + scoping_on_coordinates._spec().output_pin(0), 0, op + ) self._outputs.append(self._scoping) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mapping/solid_to_skin.py b/src/ansys/dpf/core/operators/mapping/solid_to_skin.py index c9ab5a7aaf6..83590052469 100644 --- a/src/ansys/dpf/core/operators/mapping/solid_to_skin.py +++ b/src/ansys/dpf/core/operators/mapping/solid_to_skin.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class solid_to_skin(Operator): r"""Maps a field defined on solid elements to a field defined on skin @@ -185,15 +191,21 @@ class InputsSolidToSkin(_Inputs): def __init__(self, op: Operator): super().__init__(solid_to_skin._spec().inputs, op) - self._field = Input(solid_to_skin._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + solid_to_skin._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._mesh = Input(solid_to_skin._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion] = Input( + solid_to_skin._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._solid_mesh = Input(solid_to_skin._spec().input_pin(2), 2, op, -1) + self._solid_mesh: Input[MeshedRegion] = Input( + solid_to_skin._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._solid_mesh) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -214,7 +226,7 @@ def field(self) -> Input: return self._field @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. 
skin mesh region expected @@ -235,7 +247,7 @@ def mesh(self) -> Input: return self._mesh @property - def solid_mesh(self) -> Input: + def solid_mesh(self) -> Input[MeshedRegion]: r"""Allows to connect solid_mesh input to the operator. Solid mesh support (optional). @@ -270,11 +282,11 @@ class OutputsSolidToSkin(_Outputs): def __init__(self, op: Operator): super().__init__(solid_to_skin._spec().outputs, op) - self._field = Output(solid_to_skin._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(solid_to_skin._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mapping/solid_to_skin_fc.py b/src/ansys/dpf/core/operators/mapping/solid_to_skin_fc.py index 07bcecc6be2..1b6b227b22d 100644 --- a/src/ansys/dpf/core/operators/mapping/solid_to_skin_fc.py +++ b/src/ansys/dpf/core/operators/mapping/solid_to_skin_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class solid_to_skin_fc(Operator): r"""Maps a fields container defined on solid elements to a fields container @@ -189,15 +194,21 @@ class InputsSolidToSkinFc(_Inputs): def __init__(self, op: Operator): super().__init__(solid_to_skin_fc._spec().inputs, op) - self._fields_container = Input(solid_to_skin_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + solid_to_skin_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(solid_to_skin_fc._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion] = Input( + solid_to_skin_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._solid_mesh = Input(solid_to_skin_fc._spec().input_pin(2), 2, op, -1) + self._solid_mesh: Input[MeshedRegion] = Input( + solid_to_skin_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._solid_mesh) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -216,7 +227,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. skin mesh region expected @@ -237,7 +248,7 @@ def mesh(self) -> Input: return self._mesh @property - def solid_mesh(self) -> Input: + def solid_mesh(self) -> Input[MeshedRegion]: r"""Allows to connect solid_mesh input to the operator. Solid mesh support (optional). 
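Sketch contrasting the two skin-mapping variants above: the field operator accepts Input[Field | FieldsContainer], while the _fc variant is fixed to Input[FieldsContainer]; solid_fc, skin_mesh and solid_mesh are placeholders:

    from ansys.dpf import core as dpf

    op = dpf.operators.mapping.solid_to_skin_fc()
    op.inputs.fields_container.connect(solid_fc)   # Input[FieldsContainer] only
    op.inputs.mesh.connect(skin_mesh)              # skin MeshedRegion
    op.inputs.solid_mesh.connect(solid_mesh)       # optional solid support
    on_skin = op.outputs.fields_container()        # Output[FieldsContainer]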
@@ -272,11 +283,13 @@ class OutputsSolidToSkinFc(_Outputs): def __init__(self, op: Operator): super().__init__(solid_to_skin_fc._spec().outputs, op) - self._fields_container = Output(solid_to_skin_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + solid_to_skin_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/absolute_value_by_component.py b/src/ansys/dpf/core/operators/math/absolute_value_by_component.py index 1e098e2d486..122c7087da8 100644 --- a/src/ansys/dpf/core/operators/math/absolute_value_by_component.py +++ b/src/ansys/dpf/core/operators/math/absolute_value_by_component.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class absolute_value_by_component(Operator): r"""Compute the absolute value of each data value of the input field, no @@ -152,11 +157,13 @@ class InputsAbsoluteValueByComponent(_Inputs): def __init__(self, op: Operator): super().__init__(absolute_value_by_component._spec().inputs, op) - self._field = Input(absolute_value_by_component._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer | float] = Input( + absolute_value_by_component._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect field input to the operator. 
field or fields container with only one field is expected @@ -191,11 +198,13 @@ class OutputsAbsoluteValueByComponent(_Outputs): def __init__(self, op: Operator): super().__init__(absolute_value_by_component._spec().outputs, op) - self._field = Output(absolute_value_by_component._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + absolute_value_by_component._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/absolute_value_by_component_fc.py b/src/ansys/dpf/core/operators/math/absolute_value_by_component_fc.py index 8c523d422af..6b2bb66d7a8 100644 --- a/src/ansys/dpf/core/operators/math/absolute_value_by_component_fc.py +++ b/src/ansys/dpf/core/operators/math/absolute_value_by_component_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class absolute_value_by_component_fc(Operator): r"""Compute the absolute value of each data value of the input field, no @@ -147,13 +151,13 @@ class InputsAbsoluteValueByComponentFc(_Inputs): def __init__(self, op: Operator): super().__init__(absolute_value_by_component_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( absolute_value_by_component_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
field or fields container with only one field is expected @@ -188,13 +192,13 @@ class OutputsAbsoluteValueByComponentFc(_Outputs): def __init__(self, op: Operator): super().__init__(absolute_value_by_component_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( absolute_value_by_component_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/accumulate.py b/src/ansys/dpf/core/operators/math/accumulate.py index 925463686a3..aa6274f4b8d 100644 --- a/src/ansys/dpf/core/operators/math/accumulate.py +++ b/src/ansys/dpf/core/operators/math/accumulate.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class accumulate(Operator): r"""Sums all the elementary data of a field to produce one elementary data @@ -190,15 +196,19 @@ class InputsAccumulate(_Inputs): def __init__(self, op: Operator): super().__init__(accumulate._spec().inputs, op) - self._fieldA = Input(accumulate._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer] = Input( + accumulate._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._weights = Input(accumulate._spec().input_pin(1), 1, op, -1) + self._weights: Input[Field] = Input(accumulate._spec().input_pin(1), 1, op, -1) self._inputs.append(self._weights) - self._time_scoping = Input(accumulate._spec().input_pin(2), 2, op, -1) + self._time_scoping: Input[Scoping] = Input( + accumulate._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._time_scoping) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -219,7 +229,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def weights(self) -> Input: + def weights(self) -> Input[Field]: r"""Allows to connect weights input to the operator. Field containing weights, one weight per entity @@ -240,7 +250,7 @@ def weights(self) -> Input: return self._weights @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. 
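# ---------------------------------------------------------------------------
# Illustrative sketch (not from the patch): how the typed pins of
# math.accumulate read from user code. A running DPF server is assumed; the
# empty Field below is only a placeholder for real result data, and
# evaluation follows the usual PyDPF pattern of calling an output pin.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

acc = ops.math.accumulate()
acc.inputs.fieldA.connect(dpf.Field())   # pin annotated Input[Field | FieldsContainer]
acc.inputs.weights.connect(dpf.Field())  # pin annotated Input[Field]
total = acc.outputs.field()              # pin annotated Output[Field]
# ---------------------------------------------------------------------------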
time_scoping @@ -287,11 +297,11 @@ class OutputsAccumulate(_Outputs): def __init__(self, op: Operator): super().__init__(accumulate._spec().outputs, op) - self._field = Output(accumulate._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(accumulate._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Field containing the (weighted) sum for each component in an elementary data diff --git a/src/ansys/dpf/core/operators/math/accumulate_fc.py b/src/ansys/dpf/core/operators/math/accumulate_fc.py index 9a92e3985e7..f46ac83d937 100644 --- a/src/ansys/dpf/core/operators/math/accumulate_fc.py +++ b/src/ansys/dpf/core/operators/math/accumulate_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class accumulate_fc(Operator): r"""Sums all the elementary data of a field to produce one elementary data @@ -190,15 +196,21 @@ class InputsAccumulateFc(_Inputs): def __init__(self, op: Operator): super().__init__(accumulate_fc._spec().inputs, op) - self._fields_container = Input(accumulate_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + accumulate_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._weights = Input(accumulate_fc._spec().input_pin(1), 1, op, -1) + self._weights: Input[Field] = Input( + accumulate_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._weights) - self._time_scoping = Input(accumulate_fc._spec().input_pin(2), 2, op, -1) + self._time_scoping: Input[Scoping] = Input( + accumulate_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._time_scoping) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -219,7 +231,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def weights(self) -> Input: + def weights(self) -> Input[Field]: r"""Allows to connect weights input to the operator. Field containing weights, one weight per entity @@ -240,7 +252,7 @@ def weights(self) -> Input: return self._weights @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. 
time_scoping @@ -287,11 +299,13 @@ class OutputsAccumulateFc(_Outputs): def __init__(self, op: Operator): super().__init__(accumulate_fc._spec().outputs, op) - self._fields_container = Output(accumulate_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + accumulate_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Field containing the (weighted) sum for each component in an elementary data diff --git a/src/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py b/src/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py index 85e8dd0d877..fb5fecbc2e6 100644 --- a/src/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py +++ b/src/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class accumulate_level_over_label_fc(Operator): r"""Compute the component-wise sum over all the fields that have the same ID @@ -169,17 +174,17 @@ class InputsAccumulateLevelOverLabelFc(_Inputs): def __init__(self, op: Operator): super().__init__(accumulate_level_over_label_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( accumulate_level_over_label_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._label = Input( + self._label: Input[str] = Input( accumulate_level_over_label_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._label) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -198,7 +203,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def label(self) -> Input: + def label(self) -> Input[str]: r"""Allows to connect label input to the operator. Label of the fields container where it should operate. If not defined and the input Fields Container has only one Label, the operation will be done over all the fields. 
@@ -233,13 +238,13 @@ class OutputsAccumulateLevelOverLabelFc(_Outputs): def __init__(self, op: Operator): super().__init__(accumulate_level_over_label_fc._spec().outputs, op) - self._field = Output( + self._field: Output[Field] = Output( accumulate_level_over_label_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py b/src/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py index 35300321be3..97f31706d2a 100644 --- a/src/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py +++ b/src/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class accumulate_min_over_label_fc(Operator): r"""Compute the component-wise sum over all the fields that have the same ID @@ -167,17 +172,17 @@ class InputsAccumulateMinOverLabelFc(_Inputs): def __init__(self, op: Operator): super().__init__(accumulate_min_over_label_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( accumulate_min_over_label_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._label = Input( + self._label: Input[str] = Input( accumulate_min_over_label_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._label) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -196,7 +201,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def label(self) -> Input: + def label(self) -> Input[str]: r"""Allows to connect label input to the operator. Label of the fields container where it should operate. If not defined and the input Fields Container has only one Label, the operation will be done over all the fields. 
@@ -231,11 +236,13 @@ class OutputsAccumulateMinOverLabelFc(_Outputs): def __init__(self, op: Operator): super().__init__(accumulate_min_over_label_fc._spec().outputs, op) - self._field = Output(accumulate_min_over_label_fc._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + accumulate_min_over_label_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/accumulate_over_label_fc.py b/src/ansys/dpf/core/operators/math/accumulate_over_label_fc.py index 7a15927ae57..338f49d27aa 100644 --- a/src/ansys/dpf/core/operators/math/accumulate_over_label_fc.py +++ b/src/ansys/dpf/core/operators/math/accumulate_over_label_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class accumulate_over_label_fc(Operator): r"""Compute the component-wise sum over all the fields that have the same ID @@ -163,15 +168,17 @@ class InputsAccumulateOverLabelFc(_Inputs): def __init__(self, op: Operator): super().__init__(accumulate_over_label_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( accumulate_over_label_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._label = Input(accumulate_over_label_fc._spec().input_pin(1), 1, op, -1) + self._label: Input[str] = Input( + accumulate_over_label_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._label) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -190,7 +197,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def label(self) -> Input: + def label(self) -> Input[str]: r"""Allows to connect label input to the operator. Label of the fields container where it should operate. If not defined and the input Fields Container has only one Label, the operation will be done over all the fields. 
@@ -225,11 +232,13 @@ class OutputsAccumulateOverLabelFc(_Outputs): def __init__(self, op: Operator): super().__init__(accumulate_over_label_fc._spec().outputs, op) - self._field = Output(accumulate_over_label_fc._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + accumulate_over_label_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/accumulation_per_scoping.py b/src/ansys/dpf/core/operators/math/accumulation_per_scoping.py index 202fd1b97f8..0ca3a3ebca9 100644 --- a/src/ansys/dpf/core/operators/math/accumulation_per_scoping.py +++ b/src/ansys/dpf/core/operators/math/accumulation_per_scoping.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class accumulation_per_scoping(Operator): r"""This operator calculates the sum and the percentage of total sum of the @@ -217,29 +225,29 @@ class InputsAccumulationPerScoping(_Inputs): def __init__(self, op: Operator): super().__init__(accumulation_per_scoping._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( accumulation_per_scoping._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( accumulation_per_scoping._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( accumulation_per_scoping._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( accumulation_per_scoping._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._scopings_container = Input( + self._scopings_container: Input[ScopingsContainer] = Input( accumulation_per_scoping._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._scopings_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -258,7 +266,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Master scoping. All scopings in the Scopings Container will be intersected with this scoping. @@ -279,7 +287,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
Returns @@ -298,7 +306,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -317,7 +325,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def scopings_container(self) -> Input: + def scopings_container(self) -> Input[ScopingsContainer]: r"""Allows to connect scopings_container input to the operator. The intersection between the of the first will be used. @@ -353,17 +361,17 @@ class OutputsAccumulationPerScoping(_Outputs): def __init__(self, op: Operator): super().__init__(accumulation_per_scoping._spec().outputs, op) - self._accumulation_per_scoping = Output( + self._accumulation_per_scoping: Output[FieldsContainer] = Output( accumulation_per_scoping._spec().output_pin(0), 0, op ) self._outputs.append(self._accumulation_per_scoping) - self._accumulation_per_scoping_percentage = Output( + self._accumulation_per_scoping_percentage: Output[FieldsContainer] = Output( accumulation_per_scoping._spec().output_pin(1), 1, op ) self._outputs.append(self._accumulation_per_scoping_percentage) @property - def accumulation_per_scoping(self) -> Output: + def accumulation_per_scoping(self) -> Output[FieldsContainer]: r"""Allows to get accumulation_per_scoping output of the operator Returns @@ -381,7 +389,7 @@ def accumulation_per_scoping(self) -> Output: return self._accumulation_per_scoping @property - def accumulation_per_scoping_percentage(self) -> Output: + def accumulation_per_scoping_percentage(self) -> Output[FieldsContainer]: r"""Allows to get accumulation_per_scoping_percentage output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/add.py b/src/ansys/dpf/core/operators/math/add.py index f4ba6be92c9..e9124133931 100644 --- a/src/ansys/dpf/core/operators/math/add.py +++ b/src/ansys/dpf/core/operators/math/add.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class add(Operator): r"""Computes the sum of two fields. If one field’s scoping has ‘overall’ @@ -174,13 +179,17 @@ class InputsAdd(_Inputs): def __init__(self, op: Operator): super().__init__(add._spec().inputs, op) - self._fieldA = Input(add._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer | float] = Input( + add._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(add._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer | float] = Input( + add._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -201,7 +210,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect fieldB input to the operator. 
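# ---------------------------------------------------------------------------
# Illustrative sketch (not from the patch): math.accumulation_per_scoping now
# carries typed pins for data sources and two FieldsContainer outputs. The
# result-file path is a hypothetical placeholder, the remaining pins
# (e.g. fields_container, scopings_container) are omitted for brevity, and a
# running DPF server is assumed.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

aps = ops.math.accumulation_per_scoping()
aps.inputs.data_sources.connect(dpf.DataSources(r"path/to/results.rst"))        # Input[DataSources]
sums = aps.outputs.accumulation_per_scoping()                                   # Output[FieldsContainer]
percentages = aps.outputs.accumulation_per_scoping_percentage()                 # Output[FieldsContainer]
# ---------------------------------------------------------------------------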
field or fields container with only one field is expected @@ -236,11 +245,11 @@ class OutputsAdd(_Outputs): def __init__(self, op: Operator): super().__init__(add._spec().outputs, op) - self._field = Output(add._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(add._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/add_constant.py b/src/ansys/dpf/core/operators/math/add_constant.py index 4c9558f3ae9..3979f2dd882 100644 --- a/src/ansys/dpf/core/operators/math/add_constant.py +++ b/src/ansys/dpf/core/operators/math/add_constant.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class add_constant(Operator): r"""Computes the sum of a field (in 0) and a scalar (in 1). @@ -171,13 +176,17 @@ class InputsAddConstant(_Inputs): def __init__(self, op: Operator): super().__init__(add_constant._spec().inputs, op) - self._field = Input(add_constant._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + add_constant._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._weights = Input(add_constant._spec().input_pin(1), 1, op, -1) + self._weights: Input[float] = Input( + add_constant._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._weights) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -198,7 +207,7 @@ def field(self) -> Input: return self._field @property - def weights(self) -> Input: + def weights(self) -> Input[float]: r"""Allows to connect weights input to the operator. double or vector of double @@ -245,11 +254,11 @@ class OutputsAddConstant(_Outputs): def __init__(self, op: Operator): super().__init__(add_constant._spec().outputs, op) - self._field = Output(add_constant._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(add_constant._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/add_constant_fc.py b/src/ansys/dpf/core/operators/math/add_constant_fc.py index 00d69bbdab6..059d527b1e0 100644 --- a/src/ansys/dpf/core/operators/math/add_constant_fc.py +++ b/src/ansys/dpf/core/operators/math/add_constant_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class add_constant_fc(Operator): r"""Computes the sum of a field (in 0) and a scalar (in 1). 
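# ---------------------------------------------------------------------------
# Illustrative sketch (not from the patch): the union annotation on math.add
# means a Field, a FieldsContainer, or a plain float are all valid on either
# pin, and a static checker can now flag anything else (for example a str).
# Placeholder empty Field; running DPF server assumed.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

add_op = ops.math.add()
add_op.inputs.fieldA.connect(dpf.Field())  # Input[Field | FieldsContainer | float]
add_op.inputs.fieldB.connect(2.0)          # a float constant is equally acceptable
summed = add_op.outputs.field()            # Output[Field]
# ---------------------------------------------------------------------------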
@@ -171,13 +175,17 @@ class InputsAddConstantFc(_Inputs): def __init__(self, op: Operator): super().__init__(add_constant_fc._spec().inputs, op) - self._fields_container = Input(add_constant_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + add_constant_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._weights = Input(add_constant_fc._spec().input_pin(1), 1, op, -1) + self._weights: Input[float] = Input( + add_constant_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._weights) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -198,7 +206,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def weights(self) -> Input: + def weights(self) -> Input[float]: r"""Allows to connect weights input to the operator. double or vector of double @@ -245,11 +253,13 @@ class OutputsAddConstantFc(_Outputs): def __init__(self, op: Operator): super().__init__(add_constant_fc._spec().outputs, op) - self._fields_container = Output(add_constant_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + add_constant_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/add_fc.py b/src/ansys/dpf/core/operators/math/add_fc.py index 1e70a6751c2..39a9d47aa5e 100644 --- a/src/ansys/dpf/core/operators/math/add_fc.py +++ b/src/ansys/dpf/core/operators/math/add_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class add_fc(Operator): r"""Selects all fields with the same label space in the input fields @@ -170,13 +175,17 @@ class InputsAddFc(_Inputs): def __init__(self, op: Operator): super().__init__(add_fc._spec().inputs, op) - self._fields_container1 = Input(add_fc._spec().input_pin(0), 0, op, 0) + self._fields_container1: Input[FieldsContainer | Field | float] = Input( + add_fc._spec().input_pin(0), 0, op, 0 + ) self._inputs.append(self._fields_container1) - self._fields_container2 = Input(add_fc._spec().input_pin(1), 1, op, 1) + self._fields_container2: Input[FieldsContainer | Field | float] = Input( + add_fc._spec().input_pin(1), 1, op, 1 + ) self._inputs.append(self._fields_container2) @property - def fields_container1(self) -> Input: + def fields_container1(self) -> Input[FieldsContainer | Field | float]: r"""Allows to connect fields_container1 input to the operator. Returns @@ -195,7 +204,7 @@ def fields_container1(self) -> Input: return self._fields_container1 @property - def fields_container2(self) -> Input: + def fields_container2(self) -> Input[FieldsContainer | Field | float]: r"""Allows to connect fields_container2 input to the operator. 
Returns @@ -228,11 +237,13 @@ class OutputsAddFc(_Outputs): def __init__(self, op: Operator): super().__init__(add_fc._spec().outputs, op) - self._fields_container = Output(add_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + add_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/amplitude.py b/src/ansys/dpf/core/operators/math/amplitude.py index 06ee825831f..e9aeba3b8d2 100644 --- a/src/ansys/dpf/core/operators/math/amplitude.py +++ b/src/ansys/dpf/core/operators/math/amplitude.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class amplitude(Operator): r"""Computes amplitude of a real and an imaginary field. @@ -156,13 +161,17 @@ class InputsAmplitude(_Inputs): def __init__(self, op: Operator): super().__init__(amplitude._spec().inputs, op) - self._fieldA = Input(amplitude._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer] = Input( + amplitude._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(amplitude._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer] = Input( + amplitude._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -183,7 +192,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldB input to the operator. field or fields container with only one field is expected @@ -218,11 +227,11 @@ class OutputsAmplitude(_Outputs): def __init__(self, op: Operator): super().__init__(amplitude._spec().outputs, op) - self._field = Output(amplitude._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(amplitude._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/amplitude_fc.py b/src/ansys/dpf/core/operators/math/amplitude_fc.py index 167755193e2..cbb8144f41b 100644 --- a/src/ansys/dpf/core/operators/math/amplitude_fc.py +++ b/src/ansys/dpf/core/operators/math/amplitude_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class amplitude_fc(Operator): r"""Computes the amplitude of a real and an imaginary field. 
@@ -140,11 +144,13 @@ class InputsAmplitudeFc(_Inputs): def __init__(self, op: Operator): super().__init__(amplitude_fc._spec().inputs, op) - self._fields_container = Input(amplitude_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + amplitude_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -177,11 +183,13 @@ class OutputsAmplitudeFc(_Outputs): def __init__(self, op: Operator): super().__init__(amplitude_fc._spec().outputs, op) - self._fields_container = Output(amplitude_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + amplitude_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/average_over_label_fc.py b/src/ansys/dpf/core/operators/math/average_over_label_fc.py index 9fe54b41902..9766c01c0a6 100644 --- a/src/ansys/dpf/core/operators/math/average_over_label_fc.py +++ b/src/ansys/dpf/core/operators/math/average_over_label_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class average_over_label_fc(Operator): r"""Compute the component-wise average over all the fields that have the @@ -163,15 +168,17 @@ class InputsAverageOverLabelFc(_Inputs): def __init__(self, op: Operator): super().__init__(average_over_label_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( average_over_label_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._label = Input(average_over_label_fc._spec().input_pin(1), 1, op, -1) + self._label: Input[str] = Input( + average_over_label_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._label) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -190,7 +197,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def label(self) -> Input: + def label(self) -> Input[str]: r"""Allows to connect label input to the operator. Label of the fields container where it should operate. If not defined and the input Fields Container has only one Label, the operation will be done over all the fields. 
@@ -225,11 +232,13 @@ class OutputsAverageOverLabelFc(_Outputs): def __init__(self, op: Operator): super().__init__(average_over_label_fc._spec().outputs, op) - self._field = Output(average_over_label_fc._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + average_over_label_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/centroid.py b/src/ansys/dpf/core/operators/math/centroid.py index dc4d46a610d..5f1ac683a2a 100644 --- a/src/ansys/dpf/core/operators/math/centroid.py +++ b/src/ansys/dpf/core/operators/math/centroid.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class centroid(Operator): r"""Computes centroid of field1 and field2, using fieldOut = @@ -173,15 +178,19 @@ class InputsCentroid(_Inputs): def __init__(self, op: Operator): super().__init__(centroid._spec().inputs, op) - self._fieldA = Input(centroid._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer] = Input( + centroid._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(centroid._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer] = Input( + centroid._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) - self._factor = Input(centroid._spec().input_pin(2), 2, op, -1) + self._factor: Input[float] = Input(centroid._spec().input_pin(2), 2, op, -1) self._inputs.append(self._factor) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -202,7 +211,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldB input to the operator. field or fields container with only one field is expected @@ -223,7 +232,7 @@ def fieldB(self) -> Input: return self._fieldB @property - def factor(self) -> Input: + def factor(self) -> Input[float]: r"""Allows to connect factor input to the operator. 
Scalar @@ -258,11 +267,11 @@ class OutputsCentroid(_Outputs): def __init__(self, op: Operator): super().__init__(centroid._spec().outputs, op) - self._field = Output(centroid._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(centroid._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/centroid_fc.py b/src/ansys/dpf/core/operators/math/centroid_fc.py index 1520bd8ca8c..5f8c1164dcb 100644 --- a/src/ansys/dpf/core/operators/math/centroid_fc.py +++ b/src/ansys/dpf/core/operators/math/centroid_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class centroid_fc(Operator): r"""Computes the centroid of all the matching fields of a fields container @@ -194,17 +199,23 @@ class InputsCentroidFc(_Inputs): def __init__(self, op: Operator): super().__init__(centroid_fc._spec().inputs, op) - self._fields_container = Input(centroid_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + centroid_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._time_freq = Input(centroid_fc._spec().input_pin(1), 1, op, -1) + self._time_freq: Input[float] = Input( + centroid_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._time_freq) - self._step = Input(centroid_fc._spec().input_pin(2), 2, op, -1) + self._step: Input[int] = Input(centroid_fc._spec().input_pin(2), 2, op, -1) self._inputs.append(self._step) - self._time_freq_support = Input(centroid_fc._spec().input_pin(8), 8, op, -1) + self._time_freq_support: Input[TimeFreqSupport] = Input( + centroid_fc._spec().input_pin(8), 8, op, -1 + ) self._inputs.append(self._time_freq_support) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -223,7 +234,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def time_freq(self) -> Input: + def time_freq(self) -> Input[float]: r"""Allows to connect time_freq input to the operator. Returns @@ -242,7 +253,7 @@ def time_freq(self) -> Input: return self._time_freq @property - def step(self) -> Input: + def step(self) -> Input[int]: r"""Allows to connect step input to the operator. Returns @@ -261,7 +272,7 @@ def step(self) -> Input: return self._step @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. 
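# ---------------------------------------------------------------------------
# Illustrative sketch (not from the patch): chaining operators remains the
# usual PyDPF output-to-input connection; the annotations mainly document
# which data type each pin carries (factor is Input[float], the chained pin
# Input[Field | FieldsContainer]). Placeholder fields; running DPF server
# assumed.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

add_op = ops.math.add()
add_op.inputs.fieldA.connect(dpf.Field())
add_op.inputs.fieldB.connect(dpf.Field())

cen = ops.math.centroid()
cen.inputs.fieldA.connect(add_op.outputs.field)  # feed an Output[Field] into the pin
cen.inputs.fieldB.connect(dpf.Field())
cen.inputs.factor.connect(0.5)                   # Input[float]: scalar blending factor
centroid_field = cen.outputs.field()             # Output[Field]
# ---------------------------------------------------------------------------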
Returns @@ -294,11 +305,13 @@ class OutputsCentroidFc(_Outputs): def __init__(self, op: Operator): super().__init__(centroid_fc._spec().outputs, op) - self._fields_container = Output(centroid_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + centroid_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/component_wise_divide.py b/src/ansys/dpf/core/operators/math/component_wise_divide.py index ca38ce1fe2b..d672893bdb1 100644 --- a/src/ansys/dpf/core/operators/math/component_wise_divide.py +++ b/src/ansys/dpf/core/operators/math/component_wise_divide.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class component_wise_divide(Operator): r"""Computes component-wise fraction between two fields of same @@ -164,13 +169,17 @@ class InputsComponentWiseDivide(_Inputs): def __init__(self, op: Operator): super().__init__(component_wise_divide._spec().inputs, op) - self._fieldA = Input(component_wise_divide._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer] = Input( + component_wise_divide._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(component_wise_divide._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer] = Input( + component_wise_divide._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -191,7 +200,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldB input to the operator. 
field or fields container with only one field is expected @@ -226,11 +235,13 @@ class OutputsComponentWiseDivide(_Outputs): def __init__(self, op: Operator): super().__init__(component_wise_divide._spec().outputs, op) - self._field = Output(component_wise_divide._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + component_wise_divide._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/component_wise_divide_fc.py b/src/ansys/dpf/core/operators/math/component_wise_divide_fc.py index 5f28f9c3319..bb0c374763e 100644 --- a/src/ansys/dpf/core/operators/math/component_wise_divide_fc.py +++ b/src/ansys/dpf/core/operators/math/component_wise_divide_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class component_wise_divide_fc(Operator): r"""For every two fields with the same label space (from the two input @@ -164,17 +168,17 @@ class InputsComponentWiseDivideFc(_Inputs): def __init__(self, op: Operator): super().__init__(component_wise_divide_fc._spec().inputs, op) - self._fields_containerA = Input( + self._fields_containerA: Input[FieldsContainer] = Input( component_wise_divide_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input( + self._fields_containerB: Input[FieldsContainer] = Input( component_wise_divide_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._fields_containerB) @property - def fields_containerA(self) -> Input: + def fields_containerA(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerA input to the operator. Returns @@ -193,7 +197,7 @@ def fields_containerA(self) -> Input: return self._fields_containerA @property - def fields_containerB(self) -> Input: + def fields_containerB(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerB input to the operator. 
Returns @@ -226,13 +230,13 @@ class OutputsComponentWiseDivideFc(_Outputs): def __init__(self, op: Operator): super().__init__(component_wise_divide_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( component_wise_divide_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/component_wise_product.py b/src/ansys/dpf/core/operators/math/component_wise_product.py index 19ad670f812..049aab7d1d3 100644 --- a/src/ansys/dpf/core/operators/math/component_wise_product.py +++ b/src/ansys/dpf/core/operators/math/component_wise_product.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class component_wise_product(Operator): r"""Computes component-wise product between two fields of same @@ -164,13 +169,17 @@ class InputsComponentWiseProduct(_Inputs): def __init__(self, op: Operator): super().__init__(component_wise_product._spec().inputs, op) - self._fieldA = Input(component_wise_product._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer] = Input( + component_wise_product._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(component_wise_product._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer] = Input( + component_wise_product._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -191,7 +200,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldB input to the operator. 
field or fields container with only one field is expected @@ -226,11 +235,13 @@ class OutputsComponentWiseProduct(_Outputs): def __init__(self, op: Operator): super().__init__(component_wise_product._spec().outputs, op) - self._field = Output(component_wise_product._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + component_wise_product._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/component_wise_product_fc.py b/src/ansys/dpf/core/operators/math/component_wise_product_fc.py index 73a94e61254..12a58056d47 100644 --- a/src/ansys/dpf/core/operators/math/component_wise_product_fc.py +++ b/src/ansys/dpf/core/operators/math/component_wise_product_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class component_wise_product_fc(Operator): r"""Computes component-wise product between two fields of same @@ -164,15 +169,17 @@ class InputsComponentWiseProductFc(_Inputs): def __init__(self, op: Operator): super().__init__(component_wise_product_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( component_wise_product_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._fieldB = Input(component_wise_product_fc._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer] = Input( + component_wise_product_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -193,7 +200,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldB input to the operator. 
field or fields container with only one field is expected @@ -228,13 +235,13 @@ class OutputsComponentWiseProductFc(_Outputs): def __init__(self, op: Operator): super().__init__(component_wise_product_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( component_wise_product_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/compute_residual_and_error.py b/src/ansys/dpf/core/operators/math/compute_residual_and_error.py index d4157579a9c..e532f4fa37c 100644 --- a/src/ansys/dpf/core/operators/math/compute_residual_and_error.py +++ b/src/ansys/dpf/core/operators/math/compute_residual_and_error.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class compute_residual_and_error(Operator): r"""Computes the Lp-norm of a field or a field container. When a second @@ -263,29 +268,29 @@ class InputsComputeResidualAndError(_Inputs): def __init__(self, op: Operator): super().__init__(compute_residual_and_error._spec().inputs, op) - self._field_or_fields_container1 = Input( + self._field_or_fields_container1: Input[Field | FieldsContainer] = Input( compute_residual_and_error._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._field_or_fields_container1) - self._normalization_type = Input( + self._normalization_type: Input[int] = Input( compute_residual_and_error._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._normalization_type) - self._norm_calculation_type = Input( + self._norm_calculation_type: Input[int] = Input( compute_residual_and_error._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._norm_calculation_type) - self._field_reference = Input( + self._field_reference: Input[int] = Input( compute_residual_and_error._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._field_reference) - self._field_or_fields_container2 = Input( + self._field_or_fields_container2: Input[Field | FieldsContainer] = Input( compute_residual_and_error._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._field_or_fields_container2) @property - def field_or_fields_container1(self) -> Input: + def field_or_fields_container1(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field_or_fields_container1 input to the operator. field or fields container - compulsory @@ -306,7 +311,7 @@ def field_or_fields_container1(self) -> Input: return self._field_or_fields_container1 @property - def normalization_type(self) -> Input: + def normalization_type(self) -> Input[int]: r"""Allows to connect normalization_type input to the operator. type of normalization applied to the residuals and norm calculation (optional, defaut: absolute): @@ -331,7 +336,7 @@ def normalization_type(self) -> Input: return self._normalization_type @property - def norm_calculation_type(self) -> Input: + def norm_calculation_type(self) -> Input[int]: r"""Allows to connect norm_calculation_type input to the operator. 
type for norm calculation (optional, default: L2) - It is normalized depending on Pin2 selection @@ -354,7 +359,7 @@ def norm_calculation_type(self) -> Input: return self._norm_calculation_type @property - def field_reference(self) -> Input: + def field_reference(self) -> Input[int]: r"""Allows to connect field_reference input to the operator. Field reference for the normalization step, default: 0 for entry 1, 1 for residuals - optional @@ -375,7 +380,7 @@ def field_reference(self) -> Input: return self._field_reference @property - def field_or_fields_container2(self) -> Input: + def field_or_fields_container2(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field_or_fields_container2 input to the operator. field or fields container of same dimensionality as entry 1 - optional diff --git a/src/ansys/dpf/core/operators/math/conjugate.py b/src/ansys/dpf/core/operators/math/conjugate.py index d5ce6357990..fb9c6183b25 100644 --- a/src/ansys/dpf/core/operators/math/conjugate.py +++ b/src/ansys/dpf/core/operators/math/conjugate.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class conjugate(Operator): r"""Computes element-wise conjugate of field containers containing complex @@ -142,11 +146,13 @@ class InputsConjugate(_Inputs): def __init__(self, op: Operator): super().__init__(conjugate._spec().inputs, op) - self._fields_container = Input(conjugate._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + conjugate._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -179,11 +185,13 @@ class OutputsConjugate(_Outputs): def __init__(self, op: Operator): super().__init__(conjugate._spec().outputs, op) - self._fields_container = Output(conjugate._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + conjugate._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/correlation.py b/src/ansys/dpf/core/operators/math/correlation.py index e8c6660230b..00dbbcf617c 100644 --- a/src/ansys/dpf/core/operators/math/correlation.py +++ b/src/ansys/dpf/core/operators/math/correlation.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class correlation(Operator): r"""Takes two fields and a weighting and computes their correlation: @@ -217,17 +222,25 @@ class InputsCorrelation(_Inputs): def __init__(self, op: Operator): super().__init__(correlation._spec().inputs, op) - self._fieldA = Input(correlation._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | float] = Input( + correlation._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(correlation._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer] = Input( + correlation._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) - self._weights = Input(correlation._spec().input_pin(2), 2, op, -1) + self._weights: Input[Field | FieldsContainer] = Input( + correlation._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._weights) - self._absoluteValue = Input(correlation._spec().input_pin(3), 3, op, -1) + self._absoluteValue: Input[bool] = Input( + correlation._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._absoluteValue) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | float]: r"""Allows to connect fieldA input to the operator. Field a. The reference field. @@ -248,7 +261,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldB input to the operator. Field b. If a fields container is provided, correlation is computed for each field. @@ -269,7 +282,7 @@ def fieldB(self) -> Input: return self._fieldB @property - def weights(self) -> Input: + def weights(self) -> Input[Field | FieldsContainer]: r"""Allows to connect weights input to the operator. Field M, optional weighting for correlation computation. @@ -290,7 +303,7 @@ def weights(self) -> Input: return self._weights @property - def absoluteValue(self) -> Input: + def absoluteValue(self) -> Input[bool]: r"""Allows to connect absoluteValue input to the operator. 
If true, correlation factor is ||aMb||/(||aMa||.||bMb||) @@ -338,13 +351,13 @@ class OutputsCorrelation(_Outputs): def __init__(self, op: Operator): super().__init__(correlation._spec().outputs, op) - self._field = Output(correlation._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(correlation._spec().output_pin(0), 0, op) self._outputs.append(self._field) - self._index = Output(correlation._spec().output_pin(1), 1, op) + self._index: Output[int] = Output(correlation._spec().output_pin(1), 1, op) self._outputs.append(self._index) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Correlation factor for each input field b. @@ -364,7 +377,7 @@ def field(self) -> Output: return self._field @property - def index(self) -> Output: + def index(self) -> Output[int]: r"""Allows to get index output of the operator If several b are provided, this output contains the index of the highest correlation factor. diff --git a/src/ansys/dpf/core/operators/math/cos.py b/src/ansys/dpf/core/operators/math/cos.py index 6a8a0730173..218e7afbe5b 100644 --- a/src/ansys/dpf/core/operators/math/cos.py +++ b/src/ansys/dpf/core/operators/math/cos.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class cos(Operator): r"""Computes element-wise cos(field[i]). @@ -141,11 +146,13 @@ class InputsCos(_Inputs): def __init__(self, op: Operator): super().__init__(cos._spec().inputs, op) - self._field = Input(cos._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + cos._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -180,11 +187,11 @@ class OutputsCos(_Outputs): def __init__(self, op: Operator): super().__init__(cos._spec().outputs, op) - self._field = Output(cos._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(cos._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/cos_fc.py b/src/ansys/dpf/core/operators/math/cos_fc.py index 0a48de07cc6..1855d2240a5 100644 --- a/src/ansys/dpf/core/operators/math/cos_fc.py +++ b/src/ansys/dpf/core/operators/math/cos_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class cos_fc(Operator): r"""Computes element-wise cos(field[i]). 
@@ -141,11 +145,13 @@ class InputsCosFc(_Inputs): def __init__(self, op: Operator): super().__init__(cos_fc._spec().inputs, op) - self._fields_container = Input(cos_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + cos_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -180,11 +186,13 @@ class OutputsCosFc(_Outputs): def __init__(self, op: Operator): super().__init__(cos_fc._spec().outputs, op) - self._fields_container = Output(cos_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + cos_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/cplx_derive.py b/src/ansys/dpf/core/operators/math/cplx_derive.py index 7d7b4496c9c..93e2dd58a61 100644 --- a/src/ansys/dpf/core/operators/math/cplx_derive.py +++ b/src/ansys/dpf/core/operators/math/cplx_derive.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class cplx_derive(Operator): r"""Derives field containers containing complex fields. @@ -140,11 +144,13 @@ class InputsCplxDerive(_Inputs): def __init__(self, op: Operator): super().__init__(cplx_derive._spec().inputs, op) - self._fields_container = Input(cplx_derive._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + cplx_derive._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -177,11 +183,13 @@ class OutputsCplxDerive(_Outputs): def __init__(self, op: Operator): super().__init__(cplx_derive._spec().outputs, op) - self._fields_container = Output(cplx_derive._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + cplx_derive._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/cplx_divide.py b/src/ansys/dpf/core/operators/math/cplx_divide.py index 3ac51fc6382..0a032490b80 100644 --- a/src/ansys/dpf/core/operators/math/cplx_divide.py +++ b/src/ansys/dpf/core/operators/math/cplx_divide.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class cplx_divide(Operator): r"""Computes division between two field containers containing complex @@ -158,13 +162,17 @@ class InputsCplxDivide(_Inputs): def __init__(self, op: Operator): super().__init__(cplx_divide._spec().inputs, op) - self._fields_containerA = Input(cplx_divide._spec().input_pin(0), 0, op, -1) + self._fields_containerA: Input[FieldsContainer] = Input( + cplx_divide._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(cplx_divide._spec().input_pin(1), 1, op, -1) + self._fields_containerB: Input[FieldsContainer] = Input( + cplx_divide._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fields_containerB) @property - def fields_containerA(self) -> Input: + def fields_containerA(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerA input to the operator. Returns @@ -183,7 +191,7 @@ def fields_containerA(self) -> Input: return self._fields_containerA @property - def fields_containerB(self) -> Input: + def fields_containerB(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerB input to the operator. 
Returns @@ -216,11 +224,13 @@ class OutputsCplxDivide(_Outputs): def __init__(self, op: Operator): super().__init__(cplx_divide._spec().outputs, op) - self._fields_container = Output(cplx_divide._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + cplx_divide._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/cplx_dot.py b/src/ansys/dpf/core/operators/math/cplx_dot.py index f6223697080..c34eb7ff065 100644 --- a/src/ansys/dpf/core/operators/math/cplx_dot.py +++ b/src/ansys/dpf/core/operators/math/cplx_dot.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class cplx_dot(Operator): r"""Computes product between two field containers containing complex fields. @@ -156,13 +160,17 @@ class InputsCplxDot(_Inputs): def __init__(self, op: Operator): super().__init__(cplx_dot._spec().inputs, op) - self._fields_containerA = Input(cplx_dot._spec().input_pin(0), 0, op, -1) + self._fields_containerA: Input[FieldsContainer] = Input( + cplx_dot._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(cplx_dot._spec().input_pin(1), 1, op, -1) + self._fields_containerB: Input[FieldsContainer] = Input( + cplx_dot._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fields_containerB) @property - def fields_containerA(self) -> Input: + def fields_containerA(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerA input to the operator. Returns @@ -181,7 +189,7 @@ def fields_containerA(self) -> Input: return self._fields_containerA @property - def fields_containerB(self) -> Input: + def fields_containerB(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerB input to the operator. 
Returns @@ -214,11 +222,13 @@ class OutputsCplxDot(_Outputs): def __init__(self, op: Operator): super().__init__(cplx_dot._spec().outputs, op) - self._fields_container = Output(cplx_dot._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + cplx_dot._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/cplx_multiply.py b/src/ansys/dpf/core/operators/math/cplx_multiply.py index 21569499b62..20524bf76fd 100644 --- a/src/ansys/dpf/core/operators/math/cplx_multiply.py +++ b/src/ansys/dpf/core/operators/math/cplx_multiply.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class cplx_multiply(Operator): r"""Computes multiplication between two field containers containing complex @@ -158,13 +162,17 @@ class InputsCplxMultiply(_Inputs): def __init__(self, op: Operator): super().__init__(cplx_multiply._spec().inputs, op) - self._fields_containerA = Input(cplx_multiply._spec().input_pin(0), 0, op, -1) + self._fields_containerA: Input[FieldsContainer] = Input( + cplx_multiply._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(cplx_multiply._spec().input_pin(1), 1, op, -1) + self._fields_containerB: Input[FieldsContainer] = Input( + cplx_multiply._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fields_containerB) @property - def fields_containerA(self) -> Input: + def fields_containerA(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerA input to the operator. Returns @@ -183,7 +191,7 @@ def fields_containerA(self) -> Input: return self._fields_containerA @property - def fields_containerB(self) -> Input: + def fields_containerB(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerB input to the operator. 
Returns @@ -216,11 +224,13 @@ class OutputsCplxMultiply(_Outputs): def __init__(self, op: Operator): super().__init__(cplx_multiply._spec().outputs, op) - self._fields_container = Output(cplx_multiply._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + cplx_multiply._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/cross_product.py b/src/ansys/dpf/core/operators/math/cross_product.py index 90704582471..3764779765e 100644 --- a/src/ansys/dpf/core/operators/math/cross_product.py +++ b/src/ansys/dpf/core/operators/math/cross_product.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class cross_product(Operator): r"""Computes the cross product of two vector fields. Fields can have the @@ -168,13 +173,17 @@ class InputsCrossProduct(_Inputs): def __init__(self, op: Operator): super().__init__(cross_product._spec().inputs, op) - self._fieldA = Input(cross_product._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer | float] = Input( + cross_product._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(cross_product._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer | float] = Input( + cross_product._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -195,7 +204,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect fieldB input to the operator. 
field or fields container with only one field is expected @@ -230,11 +239,11 @@ class OutputsCrossProduct(_Outputs): def __init__(self, op: Operator): super().__init__(cross_product._spec().outputs, op) - self._field = Output(cross_product._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(cross_product._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/cross_product_fc.py b/src/ansys/dpf/core/operators/math/cross_product_fc.py index d28d02a4c91..a0766236e45 100644 --- a/src/ansys/dpf/core/operators/math/cross_product_fc.py +++ b/src/ansys/dpf/core/operators/math/cross_product_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class cross_product_fc(Operator): r"""Computes the cross product of two vector fields. Fields can have the @@ -174,17 +179,17 @@ class InputsCrossProductFc(_Inputs): def __init__(self, op: Operator): super().__init__(cross_product_fc._spec().inputs, op) - self._field_or_fields_container_A = Input( - cross_product_fc._spec().input_pin(0), 0, op, -1 + self._field_or_fields_container_A: Input[Field | FieldsContainer | float] = ( + Input(cross_product_fc._spec().input_pin(0), 0, op, -1) ) self._inputs.append(self._field_or_fields_container_A) - self._field_or_fields_container_B = Input( - cross_product_fc._spec().input_pin(1), 1, op, -1 + self._field_or_fields_container_B: Input[Field | FieldsContainer | float] = ( + Input(cross_product_fc._spec().input_pin(1), 1, op, -1) ) self._inputs.append(self._field_or_fields_container_B) @property - def field_or_fields_container_A(self) -> Input: + def field_or_fields_container_A(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect field_or_fields_container_A input to the operator. field or fields container with only one field is expected @@ -205,7 +210,7 @@ def field_or_fields_container_A(self) -> Input: return self._field_or_fields_container_A @property - def field_or_fields_container_B(self) -> Input: + def field_or_fields_container_B(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect field_or_fields_container_B input to the operator. 
field or fields container with only one field is expected @@ -240,11 +245,13 @@ class OutputsCrossProductFc(_Outputs): def __init__(self, op: Operator): super().__init__(cross_product_fc._spec().outputs, op) - self._fields_container = Output(cross_product_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + cross_product_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/dot.py b/src/ansys/dpf/core/operators/math/dot.py index d0942bedf76..2b91e22c982 100644 --- a/src/ansys/dpf/core/operators/math/dot.py +++ b/src/ansys/dpf/core/operators/math/dot.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class dot(Operator): r"""DEPRECATED, PLEASE USE GENERALIZED INNER PRODUCT. Computes element - @@ -166,13 +171,17 @@ class InputsDot(_Inputs): def __init__(self, op: Operator): super().__init__(dot._spec().inputs, op) - self._fieldA = Input(dot._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer] = Input( + dot._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(dot._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer] = Input( + dot._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -193,7 +202,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldB input to the operator. field or fields container with only one field is expected @@ -228,11 +237,11 @@ class OutputsDot(_Outputs): def __init__(self, op: Operator): super().__init__(dot._spec().outputs, op) - self._field = Output(dot._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(dot._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/dot_tensor.py b/src/ansys/dpf/core/operators/math/dot_tensor.py index 8630431e9f7..2490619992a 100644 --- a/src/ansys/dpf/core/operators/math/dot_tensor.py +++ b/src/ansys/dpf/core/operators/math/dot_tensor.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class dot_tensor(Operator): r"""DEPRECATED, PLEASE USE GENERALIZED INNER PRODUCT. 
Computes element-wise @@ -158,13 +163,17 @@ class InputsDotTensor(_Inputs): def __init__(self, op: Operator): super().__init__(dot_tensor._spec().inputs, op) - self._fieldA = Input(dot_tensor._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer] = Input( + dot_tensor._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(dot_tensor._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer] = Input( + dot_tensor._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -185,7 +194,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldB input to the operator. field or fields container with only one field is expected @@ -220,11 +229,11 @@ class OutputsDotTensor(_Outputs): def __init__(self, op: Operator): super().__init__(dot_tensor._spec().outputs, op) - self._field = Output(dot_tensor._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(dot_tensor._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/entity_extractor.py b/src/ansys/dpf/core/operators/math/entity_extractor.py index 0fc4910ace7..8dc2f8b0755 100644 --- a/src/ansys/dpf/core/operators/math/entity_extractor.py +++ b/src/ansys/dpf/core/operators/math/entity_extractor.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class entity_extractor(Operator): r"""Extracts an entity from a field, based on its ID. @@ -154,13 +158,17 @@ class InputsEntityExtractor(_Inputs): def __init__(self, op: Operator): super().__init__(entity_extractor._spec().inputs, op) - self._fieldA = Input(entity_extractor._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field] = Input( + entity_extractor._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._scalar_int = Input(entity_extractor._spec().input_pin(1), 1, op, -1) + self._scalar_int: Input[int] = Input( + entity_extractor._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scalar_int) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field]: r"""Allows to connect fieldA input to the operator. Returns @@ -179,7 +187,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def scalar_int(self) -> Input: + def scalar_int(self) -> Input[int]: r"""Allows to connect scalar_int input to the operator. 
Returns @@ -212,11 +220,13 @@ class OutputsEntityExtractor(_Outputs): def __init__(self, op: Operator): super().__init__(entity_extractor._spec().outputs, op) - self._field = Output(entity_extractor._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + entity_extractor._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/expansion_psd.py b/src/ansys/dpf/core/operators/math/expansion_psd.py index c8b40239938..5d8d6656c94 100644 --- a/src/ansys/dpf/core/operators/math/expansion_psd.py +++ b/src/ansys/dpf/core/operators/math/expansion_psd.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class expansion_psd(Operator): r"""Computes the PSD response for one-sigma solution. @@ -211,25 +215,29 @@ class InputsExpansionPsd(_Inputs): def __init__(self, op: Operator): super().__init__(expansion_psd._spec().inputs, op) - self._mode_shapes = Input(expansion_psd._spec().input_pin(0), 0, op, -1) + self._mode_shapes: Input[FieldsContainer] = Input( + expansion_psd._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mode_shapes) - self._static_shapes = Input(expansion_psd._spec().input_pin(1), 1, op, -1) + self._static_shapes: Input[FieldsContainer] = Input( + expansion_psd._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._static_shapes) - self._rel_rel_covar_matrix = Input( + self._rel_rel_covar_matrix: Input[FieldsContainer] = Input( expansion_psd._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._rel_rel_covar_matrix) - self._stat_stat_covar_matrix = Input( + self._stat_stat_covar_matrix: Input[FieldsContainer] = Input( expansion_psd._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._stat_stat_covar_matrix) - self._rel_stat_covar_matrix = Input( + self._rel_stat_covar_matrix: Input[FieldsContainer] = Input( expansion_psd._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._rel_stat_covar_matrix) @property - def mode_shapes(self) -> Input: + def mode_shapes(self) -> Input[FieldsContainer]: r"""Allows to connect mode_shapes input to the operator. Fields container containing the mode shapes from modal analysis file: mode shapes for dynamic and pseudo-static displacements @@ -250,7 +258,7 @@ def mode_shapes(self) -> Input: return self._mode_shapes @property - def static_shapes(self) -> Input: + def static_shapes(self) -> Input[FieldsContainer]: r"""Allows to connect static_shapes input to the operator. Fields container containing the static shapes (base excitations) from spectral analysis file @@ -271,7 +279,7 @@ def static_shapes(self) -> Input: return self._static_shapes @property - def rel_rel_covar_matrix(self) -> Input: + def rel_rel_covar_matrix(self) -> Input[FieldsContainer]: r"""Allows to connect rel_rel_covar_matrix input to the operator. 
Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration mode-mode shapes @@ -292,7 +300,7 @@ def rel_rel_covar_matrix(self) -> Input: return self._rel_rel_covar_matrix @property - def stat_stat_covar_matrix(self) -> Input: + def stat_stat_covar_matrix(self) -> Input[FieldsContainer]: r"""Allows to connect stat_stat_covar_matrix input to the operator. Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration static-static shapes @@ -313,7 +321,7 @@ def stat_stat_covar_matrix(self) -> Input: return self._stat_stat_covar_matrix @property - def rel_stat_covar_matrix(self) -> Input: + def rel_stat_covar_matrix(self) -> Input[FieldsContainer]: r"""Allows to connect rel_stat_covar_matrix input to the operator. Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration mode-static shapes @@ -348,11 +356,13 @@ class OutputsExpansionPsd(_Outputs): def __init__(self, op: Operator): super().__init__(expansion_psd._spec().outputs, op) - self._psd = Output(expansion_psd._spec().output_pin(0), 0, op) + self._psd: Output[FieldsContainer] = Output( + expansion_psd._spec().output_pin(0), 0, op + ) self._outputs.append(self._psd) @property - def psd(self) -> Output: + def psd(self) -> Output[FieldsContainer]: r"""Allows to get psd output of the operator PSD solution per label diff --git a/src/ansys/dpf/core/operators/math/exponential.py b/src/ansys/dpf/core/operators/math/exponential.py index c10da668fd8..22cdf92623e 100644 --- a/src/ansys/dpf/core/operators/math/exponential.py +++ b/src/ansys/dpf/core/operators/math/exponential.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class exponential(Operator): r"""Computes element-wise exp(field[i]). @@ -146,11 +151,13 @@ class InputsExponential(_Inputs): def __init__(self, op: Operator): super().__init__(exponential._spec().inputs, op) - self._field = Input(exponential._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer | float] = Input( + exponential._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect field input to the operator. 
field or fields container with only one field is expected @@ -185,11 +192,11 @@ class OutputsExponential(_Outputs): def __init__(self, op: Operator): super().__init__(exponential._spec().outputs, op) - self._field = Output(exponential._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(exponential._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/exponential_fc.py b/src/ansys/dpf/core/operators/math/exponential_fc.py index bfdf9dc055c..9ea077fd765 100644 --- a/src/ansys/dpf/core/operators/math/exponential_fc.py +++ b/src/ansys/dpf/core/operators/math/exponential_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class exponential_fc(Operator): r"""Computes element-wise exp(field[i]). @@ -141,11 +145,13 @@ class InputsExponentialFc(_Inputs): def __init__(self, op: Operator): super().__init__(exponential_fc._spec().inputs, op) - self._fields_container = Input(exponential_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + exponential_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -180,11 +186,13 @@ class OutputsExponentialFc(_Outputs): def __init__(self, op: Operator): super().__init__(exponential_fc._spec().outputs, op) - self._fields_container = Output(exponential_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + exponential_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/fft_approx.py b/src/ansys/dpf/core/operators/math/fft_approx.py index 34706adb981..b3566132012 100644 --- a/src/ansys/dpf/core/operators/math/fft_approx.py +++ b/src/ansys/dpf/core/operators/math/fft_approx.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class fft_approx(Operator): r"""Computes the fitting curve using FFT filtering and cubic fitting in @@ -281,25 +287,39 @@ class InputsFftApprox(_Inputs): def __init__(self, op: Operator): super().__init__(fft_approx._spec().inputs, op) - self._time_scoping = Input(fft_approx._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping] = Input( + fft_approx._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - 
self._mesh_scoping = Input(fft_approx._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping | ScopingsContainer] = Input( + fft_approx._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._entity_to_fit = Input(fft_approx._spec().input_pin(2), 2, op, -1) + self._entity_to_fit: Input[FieldsContainer] = Input( + fft_approx._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._entity_to_fit) - self._component_number = Input(fft_approx._spec().input_pin(3), 3, op, -1) + self._component_number: Input[int] = Input( + fft_approx._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._component_number) - self._first_derivative = Input(fft_approx._spec().input_pin(4), 4, op, -1) + self._first_derivative: Input[bool] = Input( + fft_approx._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._first_derivative) - self._second_derivative = Input(fft_approx._spec().input_pin(5), 5, op, -1) + self._second_derivative: Input[bool] = Input( + fft_approx._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._second_derivative) - self._fit_data = Input(fft_approx._spec().input_pin(6), 6, op, -1) + self._fit_data: Input[bool] = Input(fft_approx._spec().input_pin(6), 6, op, -1) self._inputs.append(self._fit_data) - self._cutoff_fr = Input(fft_approx._spec().input_pin(7), 7, op, -1) + self._cutoff_fr: Input[float | int] = Input( + fft_approx._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._cutoff_fr) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. A time scoping to rescope / split the fields container given as input. @@ -320,7 +340,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect mesh_scoping input to the operator. A space (mesh entities) scoping (or scopings container) to rescope / split the fields container given as input. @@ -341,7 +361,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def entity_to_fit(self) -> Input: + def entity_to_fit(self) -> Input[FieldsContainer]: r"""Allows to connect entity_to_fit input to the operator. Data changing in time to be fitted. @@ -362,7 +382,7 @@ def entity_to_fit(self) -> Input: return self._entity_to_fit @property - def component_number(self) -> Input: + def component_number(self) -> Input[int]: r"""Allows to connect component_number input to the operator. Component number as an integer, for example '0' for X-displacement, '1' for Y-displacement, and so on. @@ -383,7 +403,7 @@ def component_number(self) -> Input: return self._component_number @property - def first_derivative(self) -> Input: + def first_derivative(self) -> Input[bool]: r"""Allows to connect first_derivative input to the operator. Calculate the first derivative (bool). The default is false. @@ -404,7 +424,7 @@ def first_derivative(self) -> Input: return self._first_derivative @property - def second_derivative(self) -> Input: + def second_derivative(self) -> Input[bool]: r"""Allows to connect second_derivative input to the operator. Calculate the second derivative (bool). The default is false. @@ -425,7 +445,7 @@ def second_derivative(self) -> Input: return self._second_derivative @property - def fit_data(self) -> Input: + def fit_data(self) -> Input[bool]: r"""Allows to connect fit_data input to the operator. Calculate the fitted values (bool). 
The default is false @@ -446,7 +466,7 @@ def fit_data(self) -> Input: return self._fit_data @property - def cutoff_fr(self) -> Input: + def cutoff_fr(self) -> Input[float | int]: r"""Allows to connect cutoff_fr input to the operator. Cutoff frequency. @@ -483,15 +503,21 @@ class OutputsFftApprox(_Outputs): def __init__(self, op: Operator): super().__init__(fft_approx._spec().outputs, op) - self._fitted_entity_y = Output(fft_approx._spec().output_pin(0), 0, op) + self._fitted_entity_y: Output[FieldsContainer] = Output( + fft_approx._spec().output_pin(0), 0, op + ) self._outputs.append(self._fitted_entity_y) - self._first_der_dy = Output(fft_approx._spec().output_pin(1), 1, op) + self._first_der_dy: Output[FieldsContainer] = Output( + fft_approx._spec().output_pin(1), 1, op + ) self._outputs.append(self._first_der_dy) - self._second_der_d2y = Output(fft_approx._spec().output_pin(2), 2, op) + self._second_der_d2y: Output[FieldsContainer] = Output( + fft_approx._spec().output_pin(2), 2, op + ) self._outputs.append(self._second_der_d2y) @property - def fitted_entity_y(self) -> Output: + def fitted_entity_y(self) -> Output[FieldsContainer]: r"""Allows to get fitted_entity_y output of the operator The fitted entity is fitted using FFT along the space scoping (node i: x=time, y=data). Fitted Y is expected to be close to the input data. @@ -511,7 +537,7 @@ def fitted_entity_y(self) -> Output: return self._fitted_entity_y @property - def first_der_dy(self) -> Output: + def first_der_dy(self) -> Output[FieldsContainer]: r"""Allows to get first_der_dy output of the operator The first derivative (dY) from the fitted Y. @@ -531,7 +557,7 @@ def first_der_dy(self) -> Output: return self._first_der_dy @property - def second_der_d2y(self) -> Output: + def second_der_d2y(self) -> Output[FieldsContainer]: r"""Allows to get second_der_d2y output of the operator The second derivative (d2Y) from the fitted Y. diff --git a/src/ansys/dpf/core/operators/math/fft_eval.py b/src/ansys/dpf/core/operators/math/fft_eval.py index 43976542cb4..d44ca97b3f6 100644 --- a/src/ansys/dpf/core/operators/math/fft_eval.py +++ b/src/ansys/dpf/core/operators/math/fft_eval.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + class fft_eval(Operator): r"""Evaluate the fast fourier transforms at a given set of fields. @@ -164,13 +169,15 @@ class InputsFftEval(_Inputs): def __init__(self, op: Operator): super().__init__(fft_eval._spec().inputs, op) - self._field_t = Input(fft_eval._spec().input_pin(0), 0, op, -1) + self._field_t: Input[Field] = Input(fft_eval._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field_t) - self._time_scoping = Input(fft_eval._spec().input_pin(1), 1, op, -1) + self._time_scoping: Input[Scoping] = Input( + fft_eval._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._time_scoping) @property - def field_t(self) -> Input: + def field_t(self) -> Input[Field]: r"""Allows to connect field_t input to the operator. field of values to evaluate @@ -191,7 +198,7 @@ def field_t(self) -> Input: return self._field_t @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. 
if specified only the results at these set ids are used @@ -227,13 +234,13 @@ class OutputsFftEval(_Outputs): def __init__(self, op: Operator): super().__init__(fft_eval._spec().outputs, op) - self._field = Output(fft_eval._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(fft_eval._spec().output_pin(0), 0, op) self._outputs.append(self._field) - self._offset = Output(fft_eval._spec().output_pin(2), 2, op) + self._offset: Output[Field] = Output(fft_eval._spec().output_pin(2), 2, op) self._outputs.append(self._offset) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns @@ -251,7 +258,7 @@ def field(self) -> Output: return self._field @property - def offset(self) -> Output: + def offset(self) -> Output[Field]: r"""Allows to get offset output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/fft_gradient_eval.py b/src/ansys/dpf/core/operators/math/fft_gradient_eval.py index 183cd636476..d3cca6c6d75 100644 --- a/src/ansys/dpf/core/operators/math/fft_gradient_eval.py +++ b/src/ansys/dpf/core/operators/math/fft_gradient_eval.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class fft_gradient_eval(Operator): r"""Evaluate min max based on the fast fourier transform at a given field, @@ -179,17 +184,21 @@ class InputsFftGradientEval(_Inputs): def __init__(self, op: Operator): super().__init__(fft_gradient_eval._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( fft_gradient_eval._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._time_scoping = Input(fft_gradient_eval._spec().input_pin(1), 1, op, -1) + self._time_scoping: Input[Scoping] = Input( + fft_gradient_eval._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._time_scoping) - self._fs_ratio = Input(fft_gradient_eval._spec().input_pin(2), 2, op, -1) + self._fs_ratio: Input[int] = Input( + fft_gradient_eval._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fs_ratio) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -208,7 +217,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. if specified only the results at these set ids are used @@ -229,7 +238,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def fs_ratio(self) -> Input: + def fs_ratio(self) -> Input[int]: r"""Allows to connect fs_ratio input to the operator. 
default value = 20 @@ -264,11 +273,13 @@ class OutputsFftGradientEval(_Outputs): def __init__(self, op: Operator): super().__init__(fft_gradient_eval._spec().outputs, op) - self._coefficients = Output(fft_gradient_eval._spec().output_pin(0), 0, op) + self._coefficients: Output[FieldsContainer] = Output( + fft_gradient_eval._spec().output_pin(0), 0, op + ) self._outputs.append(self._coefficients) @property - def coefficients(self) -> Output: + def coefficients(self) -> Output[FieldsContainer]: r"""Allows to get coefficients output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py b/src/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py index d9a5834c04f..80d8c6201e5 100644 --- a/src/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py +++ b/src/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class fft_multi_harmonic_minmax(Operator): r"""Evaluate min max fields on multi harmonic solution. min and max fields @@ -293,45 +298,45 @@ class InputsFftMultiHarmonicMinmax(_Inputs): def __init__(self, op: Operator): super().__init__(fft_multi_harmonic_minmax._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( fft_multi_harmonic_minmax._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._rpm_scoping = Input( + self._rpm_scoping: Input[Scoping] = Input( fft_multi_harmonic_minmax._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._rpm_scoping) - self._fs_ratio = Input( + self._fs_ratio: Input[int] = Input( fft_multi_harmonic_minmax._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fs_ratio) - self._num_subdivisions = Input( + self._num_subdivisions: Input[int] = Input( fft_multi_harmonic_minmax._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._num_subdivisions) - self._max_num_subdivisions = Input( + self._max_num_subdivisions: Input[int] = Input( fft_multi_harmonic_minmax._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._max_num_subdivisions) - self._num_cycles = Input( + self._num_cycles: Input[int] = Input( fft_multi_harmonic_minmax._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._num_cycles) - self._use_harmonic_zero = Input( + self._use_harmonic_zero: Input[bool] = Input( fft_multi_harmonic_minmax._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._use_harmonic_zero) - self._calculate_time_series = Input( + self._calculate_time_series: Input[bool] = Input( fft_multi_harmonic_minmax._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._calculate_time_series) - self._substeps_selector = Input( + self._substeps_selector: Input = Input( fft_multi_harmonic_minmax._spec().input_pin(8), 8, op, -1 ) self._inputs.append(self._substeps_selector) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -350,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def rpm_scoping(self) -> Input: + def rpm_scoping(self) -> Input[Scoping]: r"""Allows to connect rpm_scoping input to the operator. rpm scoping, by default the fourier series sum is evaluated using all the rpms @@ -371,7 +376,7 @@ def rpm_scoping(self) -> Input: return self._rpm_scoping @property - def fs_ratio(self) -> Input: + def fs_ratio(self) -> Input[int]: r"""Allows to connect fs_ratio input to the operator. field or fields container with only one field is expected @@ -392,7 +397,7 @@ def fs_ratio(self) -> Input: return self._fs_ratio @property - def num_subdivisions(self) -> Input: + def num_subdivisions(self) -> Input[int]: r"""Allows to connect num_subdivisions input to the operator. connect number subdivisions, used for uniform discretization @@ -413,7 +418,7 @@ def num_subdivisions(self) -> Input: return self._num_subdivisions @property - def max_num_subdivisions(self) -> Input: + def max_num_subdivisions(self) -> Input[int]: r"""Allows to connect max_num_subdivisions input to the operator. connect max number subdivisions, used to avoid huge number of sudivisions @@ -434,7 +439,7 @@ def max_num_subdivisions(self) -> Input: return self._max_num_subdivisions @property - def num_cycles(self) -> Input: + def num_cycles(self) -> Input[int]: r"""Allows to connect num_cycles input to the operator. Number of cycle of the periodic signal (default is 2) @@ -455,7 +460,7 @@ def num_cycles(self) -> Input: return self._num_cycles @property - def use_harmonic_zero(self) -> Input: + def use_harmonic_zero(self) -> Input[bool]: r"""Allows to connect use_harmonic_zero input to the operator. use harmonic zero for first rpm (default is false) @@ -476,7 +481,7 @@ def use_harmonic_zero(self) -> Input: return self._use_harmonic_zero @property - def calculate_time_series(self) -> Input: + def calculate_time_series(self) -> Input[bool]: r"""Allows to connect calculate_time_series input to the operator. 
calculates time series output (output pin 2), setting it to false enhance performance if only min/max are required (default is true) @@ -534,17 +539,21 @@ class OutputsFftMultiHarmonicMinmax(_Outputs): def __init__(self, op: Operator): super().__init__(fft_multi_harmonic_minmax._spec().outputs, op) - self._field_min = Output(fft_multi_harmonic_minmax._spec().output_pin(0), 0, op) + self._field_min: Output[FieldsContainer] = Output( + fft_multi_harmonic_minmax._spec().output_pin(0), 0, op + ) self._outputs.append(self._field_min) - self._field_max = Output(fft_multi_harmonic_minmax._spec().output_pin(1), 1, op) + self._field_max: Output[FieldsContainer] = Output( + fft_multi_harmonic_minmax._spec().output_pin(1), 1, op + ) self._outputs.append(self._field_max) - self._all_fields = Output( + self._all_fields: Output[FieldsContainer] = Output( fft_multi_harmonic_minmax._spec().output_pin(2), 2, op ) self._outputs.append(self._all_fields) @property - def field_min(self) -> Output: + def field_min(self) -> Output[FieldsContainer]: r"""Allows to get field_min output of the operator Returns @@ -562,7 +571,7 @@ def field_min(self) -> Output: return self._field_min @property - def field_max(self) -> Output: + def field_max(self) -> Output[FieldsContainer]: r"""Allows to get field_max output of the operator Returns @@ -580,7 +589,7 @@ def field_max(self) -> Output: return self._field_max @property - def all_fields(self) -> Output: + def all_fields(self) -> Output[FieldsContainer]: r"""Allows to get all_fields output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/generalized_inner_product.py b/src/ansys/dpf/core/operators/math/generalized_inner_product.py index ba35128a2ee..78ea4fedde2 100644 --- a/src/ansys/dpf/core/operators/math/generalized_inner_product.py +++ b/src/ansys/dpf/core/operators/math/generalized_inner_product.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class generalized_inner_product(Operator): r"""Computes a general notion of inner product between two fields of @@ -168,13 +173,17 @@ class InputsGeneralizedInnerProduct(_Inputs): def __init__(self, op: Operator): super().__init__(generalized_inner_product._spec().inputs, op) - self._fieldA = Input(generalized_inner_product._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer | float] = Input( + generalized_inner_product._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(generalized_inner_product._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer | float] = Input( + generalized_inner_product._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -195,7 +204,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect fieldB input to the operator. 
field or fields container with only one field is expected @@ -230,11 +239,13 @@ class OutputsGeneralizedInnerProduct(_Outputs): def __init__(self, op: Operator): super().__init__(generalized_inner_product._spec().outputs, op) - self._field = Output(generalized_inner_product._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + generalized_inner_product._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/generalized_inner_product_fc.py b/src/ansys/dpf/core/operators/math/generalized_inner_product_fc.py index d6274c1114d..a0708c1f41f 100644 --- a/src/ansys/dpf/core/operators/math/generalized_inner_product_fc.py +++ b/src/ansys/dpf/core/operators/math/generalized_inner_product_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class generalized_inner_product_fc(Operator): r"""Computes a general notion of inner product between two fields of @@ -178,17 +183,17 @@ class InputsGeneralizedInnerProductFc(_Inputs): def __init__(self, op: Operator): super().__init__(generalized_inner_product_fc._spec().inputs, op) - self._field_or_fields_container_A = Input( - generalized_inner_product_fc._spec().input_pin(0), 0, op, -1 + self._field_or_fields_container_A: Input[Field | FieldsContainer | float] = ( + Input(generalized_inner_product_fc._spec().input_pin(0), 0, op, -1) ) self._inputs.append(self._field_or_fields_container_A) - self._field_or_fields_container_B = Input( - generalized_inner_product_fc._spec().input_pin(1), 1, op, -1 + self._field_or_fields_container_B: Input[Field | FieldsContainer | float] = ( + Input(generalized_inner_product_fc._spec().input_pin(1), 1, op, -1) ) self._inputs.append(self._field_or_fields_container_B) @property - def field_or_fields_container_A(self) -> Input: + def field_or_fields_container_A(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect field_or_fields_container_A input to the operator. field or fields container with only one field is expected @@ -209,7 +214,7 @@ def field_or_fields_container_A(self) -> Input: return self._field_or_fields_container_A @property - def field_or_fields_container_B(self) -> Input: + def field_or_fields_container_B(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect field_or_fields_container_B input to the operator. 
field or fields container with only one field is expected @@ -244,13 +249,13 @@ class OutputsGeneralizedInnerProductFc(_Outputs): def __init__(self, op: Operator): super().__init__(generalized_inner_product_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( generalized_inner_product_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/img_part.py b/src/ansys/dpf/core/operators/math/img_part.py index dea6ae9de7f..4c65cbd23d9 100644 --- a/src/ansys/dpf/core/operators/math/img_part.py +++ b/src/ansys/dpf/core/operators/math/img_part.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class img_part(Operator): r"""Extracts element-wise imaginary part of field containers containing @@ -142,11 +146,13 @@ class InputsImgPart(_Inputs): def __init__(self, op: Operator): super().__init__(img_part._spec().inputs, op) - self._fields_container = Input(img_part._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + img_part._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -179,11 +185,13 @@ class OutputsImgPart(_Outputs): def __init__(self, op: Operator): super().__init__(img_part._spec().outputs, op) - self._fields_container = Output(img_part._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + img_part._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/invert.py b/src/ansys/dpf/core/operators/math/invert.py index ab27823ed36..3f9098f0819 100644 --- a/src/ansys/dpf/core/operators/math/invert.py +++ b/src/ansys/dpf/core/operators/math/invert.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class invert(Operator): r"""DEPRECATED, PLEASE USE DIVIDE. 
Computes the element-wise and @@ -143,11 +148,13 @@ class InputsInvert(_Inputs): def __init__(self, op: Operator): super().__init__(invert._spec().inputs, op) - self._field = Input(invert._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + invert._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -182,11 +189,11 @@ class OutputsInvert(_Outputs): def __init__(self, op: Operator): super().__init__(invert._spec().outputs, op) - self._field = Output(invert._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(invert._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/invert_fc.py b/src/ansys/dpf/core/operators/math/invert_fc.py index efa8c50e989..efb6211a0fa 100644 --- a/src/ansys/dpf/core/operators/math/invert_fc.py +++ b/src/ansys/dpf/core/operators/math/invert_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class invert_fc(Operator): r"""DEPRECATED, PLEASE USE DIVIDE. Computes the element-wise and @@ -143,11 +147,13 @@ class InputsInvertFc(_Inputs): def __init__(self, op: Operator): super().__init__(invert_fc._spec().inputs, op) - self._fields_container = Input(invert_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + invert_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
field or fields container with only one field is expected @@ -182,11 +188,13 @@ class OutputsInvertFc(_Outputs): def __init__(self, op: Operator): super().__init__(invert_fc._spec().outputs, op) - self._fields_container = Output(invert_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + invert_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/kronecker_prod.py b/src/ansys/dpf/core/operators/math/kronecker_prod.py index a6e05b00765..8531952a0ca 100644 --- a/src/ansys/dpf/core/operators/math/kronecker_prod.py +++ b/src/ansys/dpf/core/operators/math/kronecker_prod.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class kronecker_prod(Operator): r"""Computes element-wise Kronecker product between two tensor fields. @@ -156,13 +161,17 @@ class InputsKroneckerProd(_Inputs): def __init__(self, op: Operator): super().__init__(kronecker_prod._spec().inputs, op) - self._fieldA = Input(kronecker_prod._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer] = Input( + kronecker_prod._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(kronecker_prod._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer] = Input( + kronecker_prod._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -183,7 +192,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldB input to the operator. 
field or fields container with only one field is expected @@ -218,11 +227,11 @@ class OutputsKroneckerProd(_Outputs): def __init__(self, op: Operator): super().__init__(kronecker_prod._spec().outputs, op) - self._field = Output(kronecker_prod._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(kronecker_prod._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/linear_combination.py b/src/ansys/dpf/core/operators/math/linear_combination.py index d0452e5f286..0dee063b38d 100644 --- a/src/ansys/dpf/core/operators/math/linear_combination.py +++ b/src/ansys/dpf/core/operators/math/linear_combination.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class linear_combination(Operator): r"""Computes aXY + bZ where a,b (in 0, in 3) are scalar and X,Y,Z (in 1,2,4) @@ -209,25 +213,29 @@ class InputsLinearCombination(_Inputs): def __init__(self, op: Operator): super().__init__(linear_combination._spec().inputs, op) - self._a = Input(linear_combination._spec().input_pin(0), 0, op, -1) + self._a: Input[float] = Input( + linear_combination._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._a) - self._fields_containerA = Input( + self._fields_containerA: Input[FieldsContainer] = Input( linear_combination._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input( + self._fields_containerB: Input[FieldsContainer] = Input( linear_combination._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_containerB) - self._b = Input(linear_combination._spec().input_pin(3), 3, op, -1) + self._b: Input[float] = Input( + linear_combination._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._b) - self._fields_containerC = Input( + self._fields_containerC: Input[FieldsContainer] = Input( linear_combination._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._fields_containerC) @property - def a(self) -> Input: + def a(self) -> Input[float]: r"""Allows to connect a input to the operator. Double @@ -248,7 +256,7 @@ def a(self) -> Input: return self._a @property - def fields_containerA(self) -> Input: + def fields_containerA(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerA input to the operator. Returns @@ -267,7 +275,7 @@ def fields_containerA(self) -> Input: return self._fields_containerA @property - def fields_containerB(self) -> Input: + def fields_containerB(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerB input to the operator. Returns @@ -286,7 +294,7 @@ def fields_containerB(self) -> Input: return self._fields_containerB @property - def b(self) -> Input: + def b(self) -> Input[float]: r"""Allows to connect b input to the operator. Double @@ -307,7 +315,7 @@ def b(self) -> Input: return self._b @property - def fields_containerC(self) -> Input: + def fields_containerC(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerC input to the operator. 
Returns @@ -340,11 +348,13 @@ class OutputsLinearCombination(_Outputs): def __init__(self, op: Operator): super().__init__(linear_combination._spec().outputs, op) - self._fields_container = Output(linear_combination._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + linear_combination._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/ln.py b/src/ansys/dpf/core/operators/math/ln.py index 326e760242e..0b9f2729ae6 100644 --- a/src/ansys/dpf/core/operators/math/ln.py +++ b/src/ansys/dpf/core/operators/math/ln.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class ln(Operator): r"""Computes element-wise ln(field[i]). @@ -146,11 +151,13 @@ class InputsLn(_Inputs): def __init__(self, op: Operator): super().__init__(ln._spec().inputs, op) - self._field = Input(ln._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer | float] = Input( + ln._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -185,11 +192,11 @@ class OutputsLn(_Outputs): def __init__(self, op: Operator): super().__init__(ln._spec().outputs, op) - self._field = Output(ln._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(ln._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/ln_fc.py b/src/ansys/dpf/core/operators/math/ln_fc.py index ee0fbc9f4ff..b266ba835e2 100644 --- a/src/ansys/dpf/core/operators/math/ln_fc.py +++ b/src/ansys/dpf/core/operators/math/ln_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class ln_fc(Operator): r"""Computes element-wise ln(field[i]). @@ -141,11 +145,13 @@ class InputsLnFc(_Inputs): def __init__(self, op: Operator): super().__init__(ln_fc._spec().inputs, op) - self._fields_container = Input(ln_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + ln_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
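# Usage sketch (illustration only; fc_x, fc_y and fc_z are placeholder
# FieldsContainer objects): math.linear_combination computes aXY + bZ, and the
# annotations above make the scalar pins (0 and 3) read as Input[float] while
# the container pins (1, 2 and 4) read as Input[FieldsContainer].
from ansys.dpf import core as dpf

op = dpf.operators.math.linear_combination()
op.inputs.a.connect(2.0)                   # Input[float]
op.inputs.fields_containerA.connect(fc_x)  # Input[FieldsContainer]
op.inputs.fields_containerB.connect(fc_y)
op.inputs.b.connect(0.5)                   # Input[float]
op.inputs.fields_containerC.connect(fc_z)
combined = op.outputs.fields_container()   # Output[FieldsContainer]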
field or fields container with only one field is expected @@ -180,11 +186,13 @@ class OutputsLnFc(_Outputs): def __init__(self, op: Operator): super().__init__(ln_fc._spec().outputs, op) - self._fields_container = Output(ln_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + ln_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/mac.py b/src/ansys/dpf/core/operators/math/mac.py index 74a38531e1b..33b52fd3eff 100644 --- a/src/ansys/dpf/core/operators/math/mac.py +++ b/src/ansys/dpf/core/operators/math/mac.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class mac(Operator): r"""Computes MAC Matrix between two fields container, both for real and @@ -194,15 +199,19 @@ class InputsMac(_Inputs): def __init__(self, op: Operator): super().__init__(mac._spec().inputs, op) - self._fields_containerA = Input(mac._spec().input_pin(0), 0, op, -1) + self._fields_containerA: Input[FieldsContainer] = Input( + mac._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(mac._spec().input_pin(1), 1, op, -1) + self._fields_containerB: Input[FieldsContainer] = Input( + mac._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fields_containerB) - self._weights = Input(mac._spec().input_pin(2), 2, op, -1) + self._weights: Input[Field] = Input(mac._spec().input_pin(2), 2, op, -1) self._inputs.append(self._weights) @property - def fields_containerA(self) -> Input: + def fields_containerA(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerA input to the operator. Fields Container A. @@ -223,7 +232,7 @@ def fields_containerA(self) -> Input: return self._fields_containerA @property - def fields_containerB(self) -> Input: + def fields_containerB(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerB input to the operator. Fields Container B. @@ -244,7 +253,7 @@ def fields_containerB(self) -> Input: return self._fields_containerB @property - def weights(self) -> Input: + def weights(self) -> Input[Field]: r"""Allows to connect weights input to the operator. Field M, optional weighting for MAC Matrix computation. @@ -291,11 +300,11 @@ class OutputsMac(_Outputs): def __init__(self, op: Operator): super().__init__(mac._spec().outputs, op) - self._field = Output(mac._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(mac._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator MAC Matrix for all the combinations between mode fields of Field Container A and Field Container B. Results listed row by row. 
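# Usage sketch (illustration only; modes_a, modes_b and mass_field are
# placeholders): math.mac takes the two mode sets as FieldsContainer pins and
# an optional weighting Field, and now returns a typed Output[Field] holding
# the MAC matrix.
from ansys.dpf import core as dpf

op = dpf.operators.math.mac()
op.inputs.fields_containerA.connect(modes_a)  # Input[FieldsContainer]
op.inputs.fields_containerB.connect(modes_b)  # Input[FieldsContainer]
op.inputs.weights.connect(mass_field)         # Input[Field], optional weighting
mac_matrix = op.outputs.field()               # Output[Field]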
diff --git a/src/ansys/dpf/core/operators/math/make_one_on_comp.py b/src/ansys/dpf/core/operators/math/make_one_on_comp.py index f14b9bce1c8..106d1c6e162 100644 --- a/src/ansys/dpf/core/operators/math/make_one_on_comp.py +++ b/src/ansys/dpf/core/operators/math/make_one_on_comp.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class make_one_on_comp(Operator): r"""Takes the input field’s scoping and creates a field full of zeros, @@ -156,13 +160,17 @@ class InputsMakeOneOnComp(_Inputs): def __init__(self, op: Operator): super().__init__(make_one_on_comp._spec().inputs, op) - self._fieldA = Input(make_one_on_comp._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field] = Input( + make_one_on_comp._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._scalar_int = Input(make_one_on_comp._spec().input_pin(1), 1, op, -1) + self._scalar_int: Input[int] = Input( + make_one_on_comp._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scalar_int) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field]: r"""Allows to connect fieldA input to the operator. Returns @@ -181,7 +189,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def scalar_int(self) -> Input: + def scalar_int(self) -> Input[int]: r"""Allows to connect scalar_int input to the operator. Returns @@ -214,11 +222,13 @@ class OutputsMakeOneOnComp(_Outputs): def __init__(self, op: Operator): super().__init__(make_one_on_comp._spec().outputs, op) - self._field = Output(make_one_on_comp._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + make_one_on_comp._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/matrix_inverse.py b/src/ansys/dpf/core/operators/math/matrix_inverse.py index 0996a5cdd8c..050469d2d76 100644 --- a/src/ansys/dpf/core/operators/math/matrix_inverse.py +++ b/src/ansys/dpf/core/operators/math/matrix_inverse.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class matrix_inverse(Operator): r"""Computes the complex matrix inverse for each field in the given fields @@ -143,11 +147,13 @@ class InputsMatrixInverse(_Inputs): def __init__(self, op: Operator): super().__init__(matrix_inverse._spec().inputs, op) - self._fields_container = Input(matrix_inverse._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + matrix_inverse._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
fields_container @@ -182,11 +188,13 @@ class OutputsMatrixInverse(_Outputs): def __init__(self, op: Operator): super().__init__(matrix_inverse._spec().outputs, op) - self._fields_container = Output(matrix_inverse._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + matrix_inverse._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/min_max_over_time.py b/src/ansys/dpf/core/operators/math/min_max_over_time.py index 7ac6667f64e..7f0254bc75b 100644 --- a/src/ansys/dpf/core/operators/math/min_max_over_time.py +++ b/src/ansys/dpf/core/operators/math/min_max_over_time.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class min_max_over_time(Operator): r"""Evaluates minimum/maximum over time/frequency. @@ -167,15 +171,17 @@ class InputsMinMaxOverTime(_Inputs): def __init__(self, op: Operator): super().__init__(min_max_over_time._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( min_max_over_time._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._int32 = Input(min_max_over_time._spec().input_pin(5), 5, op, -1) + self._int32: Input[int] = Input( + min_max_over_time._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._int32) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -194,7 +200,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def int32(self) -> Input: + def int32(self) -> Input[int]: r"""Allows to connect int32 input to the operator. Define min or max. 
@@ -230,13 +236,17 @@ class OutputsMinMaxOverTime(_Outputs): def __init__(self, op: Operator): super().__init__(min_max_over_time._spec().outputs, op) - self._field_container_1 = Output(min_max_over_time._spec().output_pin(0), 0, op) + self._field_container_1: Output[FieldsContainer] = Output( + min_max_over_time._spec().output_pin(0), 0, op + ) self._outputs.append(self._field_container_1) - self._field_container_2 = Output(min_max_over_time._spec().output_pin(1), 1, op) + self._field_container_2: Output[FieldsContainer] = Output( + min_max_over_time._spec().output_pin(1), 1, op + ) self._outputs.append(self._field_container_2) @property - def field_container_1(self) -> Output: + def field_container_1(self) -> Output[FieldsContainer]: r"""Allows to get field_container_1 output of the operator Returns @@ -254,7 +264,7 @@ def field_container_1(self) -> Output: return self._field_container_1 @property - def field_container_2(self) -> Output: + def field_container_2(self) -> Output[FieldsContainer]: r"""Allows to get field_container_2 output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/minus.py b/src/ansys/dpf/core/operators/math/minus.py index 620dc960db5..5b3c5f1a754 100644 --- a/src/ansys/dpf/core/operators/math/minus.py +++ b/src/ansys/dpf/core/operators/math/minus.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class minus(Operator): r"""Computes the difference of two fields. If one field’s scoping has an @@ -172,13 +177,17 @@ class InputsMinus(_Inputs): def __init__(self, op: Operator): super().__init__(minus._spec().inputs, op) - self._fieldA = Input(minus._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer | float] = Input( + minus._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(minus._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer | float] = Input( + minus._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -199,7 +208,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect fieldB input to the operator. 
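# Usage sketch (illustration only; transient_fc is a placeholder
# FieldsContainer, and the meaning of the int32 value follows the spec text
# "Define min or max", whose exact convention is not restated here): both
# result pins of math.min_max_over_time are now typed Output[FieldsContainer].
from ansys.dpf import core as dpf

op = dpf.operators.math.min_max_over_time()
op.inputs.fields_container.connect(transient_fc)  # Input[FieldsContainer]
op.inputs.int32.connect(0)                        # Input[int], selects min or max
out_1 = op.outputs.field_container_1()            # Output[FieldsContainer]
out_2 = op.outputs.field_container_2()            # Output[FieldsContainer]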
field or fields container with only one field is expected @@ -234,11 +243,11 @@ class OutputsMinus(_Outputs): def __init__(self, op: Operator): super().__init__(minus._spec().outputs, op) - self._field = Output(minus._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(minus._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/minus_fc.py b/src/ansys/dpf/core/operators/math/minus_fc.py index fa0e9ff656c..902efb22e8d 100644 --- a/src/ansys/dpf/core/operators/math/minus_fc.py +++ b/src/ansys/dpf/core/operators/math/minus_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class minus_fc(Operator): r"""Computes the difference of two fields. If one field’s scoping has an @@ -178,17 +183,17 @@ class InputsMinusFc(_Inputs): def __init__(self, op: Operator): super().__init__(minus_fc._spec().inputs, op) - self._field_or_fields_container_A = Input( - minus_fc._spec().input_pin(0), 0, op, -1 + self._field_or_fields_container_A: Input[Field | FieldsContainer | float] = ( + Input(minus_fc._spec().input_pin(0), 0, op, -1) ) self._inputs.append(self._field_or_fields_container_A) - self._field_or_fields_container_B = Input( - minus_fc._spec().input_pin(1), 1, op, -1 + self._field_or_fields_container_B: Input[Field | FieldsContainer | float] = ( + Input(minus_fc._spec().input_pin(1), 1, op, -1) ) self._inputs.append(self._field_or_fields_container_B) @property - def field_or_fields_container_A(self) -> Input: + def field_or_fields_container_A(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect field_or_fields_container_A input to the operator. field or fields container with only one field is expected @@ -209,7 +214,7 @@ def field_or_fields_container_A(self) -> Input: return self._field_or_fields_container_A @property - def field_or_fields_container_B(self) -> Input: + def field_or_fields_container_B(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect field_or_fields_container_B input to the operator. 
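# Usage sketch (illustration only; stress_a is a placeholder Field): the union
# annotation on math.minus makes explicit that each pin accepts a Field, a
# one-field FieldsContainer, or a plain float.
from ansys.dpf import core as dpf

op = dpf.operators.math.minus()
op.inputs.fieldA.connect(stress_a)  # Input[Field | FieldsContainer | float]
op.inputs.fieldB.connect(1.0)       # a bare float is also a valid value here
delta = op.outputs.field()          # Output[Field]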
field or fields container with only one field is expected @@ -244,11 +249,13 @@ class OutputsMinusFc(_Outputs): def __init__(self, op: Operator): super().__init__(minus_fc._spec().outputs, op) - self._fields_container = Output(minus_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + minus_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/modal_damping_ratio.py b/src/ansys/dpf/core/operators/math/modal_damping_ratio.py index dd3a7797c68..4b2634b465e 100644 --- a/src/ansys/dpf/core/operators/math/modal_damping_ratio.py +++ b/src/ansys/dpf/core/operators/math/modal_damping_ratio.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class modal_damping_ratio(Operator): r"""Computes damping ratio for each mode shape as X_i = const + ratio_i + @@ -213,17 +217,25 @@ class InputsModalDampingRatio(_Inputs): def __init__(self, op: Operator): super().__init__(modal_damping_ratio._spec().inputs, op) - self._natural_freq = Input(modal_damping_ratio._spec().input_pin(0), 0, op, -1) + self._natural_freq: Input = Input( + modal_damping_ratio._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._natural_freq) - self._const_ratio = Input(modal_damping_ratio._spec().input_pin(1), 1, op, -1) + self._const_ratio: Input[float] = Input( + modal_damping_ratio._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._const_ratio) - self._ratio_by_modes = Input( + self._ratio_by_modes: Input = Input( modal_damping_ratio._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._ratio_by_modes) - self._m_coefficient = Input(modal_damping_ratio._spec().input_pin(3), 3, op, -1) + self._m_coefficient: Input[float] = Input( + modal_damping_ratio._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._m_coefficient) - self._k_coefficient = Input(modal_damping_ratio._spec().input_pin(4), 4, op, -1) + self._k_coefficient: Input[float] = Input( + modal_damping_ratio._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._k_coefficient) @property @@ -248,7 +260,7 @@ def natural_freq(self) -> Input: return self._natural_freq @property - def const_ratio(self) -> Input: + def const_ratio(self) -> Input[float]: r"""Allows to connect const_ratio input to the operator. constant modal damping ratio @@ -290,7 +302,7 @@ def ratio_by_modes(self) -> Input: return self._ratio_by_modes @property - def m_coefficient(self) -> Input: + def m_coefficient(self) -> Input[float]: r"""Allows to connect m_coefficient input to the operator. global mass matrix multiplier @@ -311,7 +323,7 @@ def m_coefficient(self) -> Input: return self._m_coefficient @property - def k_coefficient(self) -> Input: + def k_coefficient(self) -> Input[float]: r"""Allows to connect k_coefficient input to the operator. 
global stiffness matrix multiplier @@ -346,11 +358,13 @@ class OutputsModalDampingRatio(_Outputs): def __init__(self, op: Operator): super().__init__(modal_damping_ratio._spec().outputs, op) - self._field = Output(modal_damping_ratio._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + modal_damping_ratio._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator field of modal damping ratio. diff --git a/src/ansys/dpf/core/operators/math/modal_superposition.py b/src/ansys/dpf/core/operators/math/modal_superposition.py index eca623d4efa..94acbcc9019 100644 --- a/src/ansys/dpf/core/operators/math/modal_superposition.py +++ b/src/ansys/dpf/core/operators/math/modal_superposition.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class modal_superposition(Operator): r"""Computes the solution in the time/frequency space from a modal solution @@ -218,23 +224,29 @@ class InputsModalSuperposition(_Inputs): def __init__(self, op: Operator): super().__init__(modal_superposition._spec().inputs, op) - self._modal_basis = Input(modal_superposition._spec().input_pin(0), 0, op, -1) + self._modal_basis: Input[FieldsContainer] = Input( + modal_superposition._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._modal_basis) - self._solution_in_modal_space = Input( + self._solution_in_modal_space: Input[FieldsContainer] = Input( modal_superposition._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._solution_in_modal_space) - self._incremental_fc = Input( + self._incremental_fc: Input[FieldsContainer] = Input( modal_superposition._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._incremental_fc) - self._time_scoping = Input(modal_superposition._spec().input_pin(3), 3, op, -1) + self._time_scoping: Input[Scoping] = Input( + modal_superposition._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(modal_superposition._spec().input_pin(4), 4, op, -1) + self._mesh_scoping: Input[Scoping | ScopingsContainer] = Input( + modal_superposition._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._mesh_scoping) @property - def modal_basis(self) -> Input: + def modal_basis(self) -> Input[FieldsContainer]: r"""Allows to connect modal_basis input to the operator. One field by mode with each field representing a mode shape on nodes or elements. @@ -255,7 +267,7 @@ def modal_basis(self) -> Input: return self._modal_basis @property - def solution_in_modal_space(self) -> Input: + def solution_in_modal_space(self) -> Input[FieldsContainer]: r"""Allows to connect solution_in_modal_space input to the operator. One field by time/frequency with each field having a ponderating coefficient for each mode of the modal_basis pin. @@ -276,7 +288,7 @@ def solution_in_modal_space(self) -> Input: return self._solution_in_modal_space @property - def incremental_fc(self) -> Input: + def incremental_fc(self) -> Input[FieldsContainer]: r"""Allows to connect incremental_fc input to the operator. 
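# Usage sketch (illustration only; the frequency and ratio lists are
# placeholder values): in this patch the scalar pins of
# math.modal_damping_ratio gain Input[float], while natural_freq and
# ratio_by_modes keep the plain Input annotation.
from ansys.dpf import core as dpf

op = dpf.operators.math.modal_damping_ratio()
op.inputs.natural_freq.connect([12.3, 45.6, 78.9])    # still an untyped Input
op.inputs.const_ratio.connect(0.02)                   # Input[float]
op.inputs.ratio_by_modes.connect([0.01, 0.015, 0.02]) # still an untyped Input
op.inputs.m_coefficient.connect(0.0)                  # Input[float]
op.inputs.k_coefficient.connect(0.0)                  # Input[float]
damping = op.outputs.field()                          # Output[Field]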
If a non-empty fields container is introduced, it is modified, and sent to the output, to add the contribution of the requested expansion. The label spaces produced from the multiplication must be the same as the incremental ones. @@ -297,7 +309,7 @@ def incremental_fc(self) -> Input: return self._incremental_fc @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. Compute the result on a subset of the time frequency domain defined in the solution_in_modal_space fields container. @@ -318,7 +330,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect mesh_scoping input to the operator. Compute the result on a subset of the space domain defined in the modal_basis fields container. @@ -353,13 +365,13 @@ class OutputsModalSuperposition(_Outputs): def __init__(self, op: Operator): super().__init__(modal_superposition._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( modal_superposition._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/modulus.py b/src/ansys/dpf/core/operators/math/modulus.py index 8a5acefd5dd..a2825edea3e 100644 --- a/src/ansys/dpf/core/operators/math/modulus.py +++ b/src/ansys/dpf/core/operators/math/modulus.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class modulus(Operator): r"""Computes element-wise modulus of field containers containing complex @@ -142,11 +146,13 @@ class InputsModulus(_Inputs): def __init__(self, op: Operator): super().__init__(modulus._spec().inputs, op) - self._fields_container = Input(modulus._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + modulus._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
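# Usage sketch (illustration only; modes_fc, coords_fc and node_scoping are
# placeholders): math.modal_superposition now exposes FieldsContainer pins for
# the modal basis and modal coordinates, and Scoping-typed pins for the
# optional time and mesh restrictions.
from ansys.dpf import core as dpf

op = dpf.operators.math.modal_superposition()
op.inputs.modal_basis.connect(modes_fc)               # Input[FieldsContainer]
op.inputs.solution_in_modal_space.connect(coords_fc)  # Input[FieldsContainer]
op.inputs.mesh_scoping.connect(node_scoping)          # Input[Scoping | ScopingsContainer]
expanded = op.outputs.fields_container()              # Output[FieldsContainer]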
Returns @@ -179,11 +185,13 @@ class OutputsModulus(_Outputs): def __init__(self, op: Operator): super().__init__(modulus._spec().outputs, op) - self._fields_container = Output(modulus._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + modulus._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/norm.py b/src/ansys/dpf/core/operators/math/norm.py index b65967ca145..eb34f4eac34 100644 --- a/src/ansys/dpf/core/operators/math/norm.py +++ b/src/ansys/dpf/core/operators/math/norm.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class norm(Operator): r"""Computes the element-wise Lp norm of the field elementary data. Default @@ -158,13 +163,15 @@ class InputsNorm(_Inputs): def __init__(self, op: Operator): super().__init__(norm._spec().inputs, op) - self._field = Input(norm._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + norm._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._scalar_int = Input(norm._spec().input_pin(1), 1, op, -1) + self._scalar_int: Input[int] = Input(norm._spec().input_pin(1), 1, op, -1) self._inputs.append(self._scalar_int) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -185,7 +192,7 @@ def field(self) -> Input: return self._field @property - def scalar_int(self) -> Input: + def scalar_int(self) -> Input[int]: r"""Allows to connect scalar_int input to the operator. Lp normalisation type, p = 1, 2, ...n - Default Lp=L2 @@ -220,11 +227,11 @@ class OutputsNorm(_Outputs): def __init__(self, op: Operator): super().__init__(norm._spec().outputs, op) - self._field = Output(norm._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(norm._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/norm_fc.py b/src/ansys/dpf/core/operators/math/norm_fc.py index 03d1e98f9d2..4e2fe0cb302 100644 --- a/src/ansys/dpf/core/operators/math/norm_fc.py +++ b/src/ansys/dpf/core/operators/math/norm_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class norm_fc(Operator): r"""Computes the element-wise L2 norm of the field elementary data. 
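# Usage sketch (illustration only; velocity_field is a placeholder Field): on
# math.norm the Lp selector pin is now Input[int]; leaving it unconnected
# keeps the documented default of L2.
from ansys.dpf import core as dpf

op = dpf.operators.math.norm()
op.inputs.field.connect(velocity_field)  # Input[Field | FieldsContainer]
op.inputs.scalar_int.connect(1)          # Input[int], requesting the L1 norm
norm_field = op.outputs.field()          # Output[Field]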
This @@ -159,13 +163,15 @@ class InputsNormFc(_Inputs): def __init__(self, op: Operator): super().__init__(norm_fc._spec().inputs, op) - self._fields_container = Input(norm_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + norm_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._scalar_int = Input(norm_fc._spec().input_pin(1), 1, op, -1) + self._scalar_int: Input[int] = Input(norm_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._scalar_int) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -184,7 +190,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def scalar_int(self) -> Input: + def scalar_int(self) -> Input[int]: r"""Allows to connect scalar_int input to the operator. Lp normalisation type, p = 1, 2, ...n - Default Lp=2 @@ -219,11 +225,13 @@ class OutputsNormFc(_Outputs): def __init__(self, op: Operator): super().__init__(norm_fc._spec().outputs, op) - self._fields_container = Output(norm_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + norm_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/outer_product.py b/src/ansys/dpf/core/operators/math/outer_product.py index cc81e94b48a..57af7e4d676 100644 --- a/src/ansys/dpf/core/operators/math/outer_product.py +++ b/src/ansys/dpf/core/operators/math/outer_product.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class outer_product(Operator): r"""Computes the outer product of two vector fields. @@ -166,13 +171,17 @@ class InputsOuterProduct(_Inputs): def __init__(self, op: Operator): super().__init__(outer_product._spec().inputs, op) - self._fieldA = Input(outer_product._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer | float] = Input( + outer_product._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(outer_product._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer | float] = Input( + outer_product._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -193,7 +202,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect fieldB input to the operator. 
field or fields container with only one field is expected @@ -228,11 +237,11 @@ class OutputsOuterProduct(_Outputs): def __init__(self, op: Operator): super().__init__(outer_product._spec().outputs, op) - self._field = Output(outer_product._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(outer_product._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/overall_dot.py b/src/ansys/dpf/core/operators/math/overall_dot.py index 9c74348aa3f..bccc769efda 100644 --- a/src/ansys/dpf/core/operators/math/overall_dot.py +++ b/src/ansys/dpf/core/operators/math/overall_dot.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class overall_dot(Operator): r"""Computes a dot product between two fields (fields are seen like a single @@ -157,13 +161,13 @@ class InputsOverallDot(_Inputs): def __init__(self, op: Operator): super().__init__(overall_dot._spec().inputs, op) - self._fieldA = Input(overall_dot._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field] = Input(overall_dot._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(overall_dot._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field] = Input(overall_dot._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field]: r"""Allows to connect fieldA input to the operator. Returns @@ -182,7 +186,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field]: r"""Allows to connect fieldB input to the operator. Returns @@ -215,11 +219,11 @@ class OutputsOverallDot(_Outputs): def __init__(self, op: Operator): super().__init__(overall_dot._spec().outputs, op) - self._field = Output(overall_dot._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(overall_dot._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Field defined on over-all location, contains a unique scalar value diff --git a/src/ansys/dpf/core/operators/math/phase.py b/src/ansys/dpf/core/operators/math/phase.py index d2af3639f66..f08890a509e 100644 --- a/src/ansys/dpf/core/operators/math/phase.py +++ b/src/ansys/dpf/core/operators/math/phase.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class phase(Operator): r"""Computes the phase (in rad) between a real and an imaginary field. 
@@ -156,13 +161,17 @@ class InputsPhase(_Inputs): def __init__(self, op: Operator): super().__init__(phase._spec().inputs, op) - self._fieldA = Input(phase._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer] = Input( + phase._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(phase._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer] = Input( + phase._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -183,7 +192,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldB input to the operator. field or fields container with only one field is expected @@ -218,11 +227,11 @@ class OutputsPhase(_Outputs): def __init__(self, op: Operator): super().__init__(phase._spec().outputs, op) - self._field = Output(phase._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(phase._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/phase_fc.py b/src/ansys/dpf/core/operators/math/phase_fc.py index 708e188ac53..6db1b86e091 100644 --- a/src/ansys/dpf/core/operators/math/phase_fc.py +++ b/src/ansys/dpf/core/operators/math/phase_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class phase_fc(Operator): r"""Computes phase (in rad) between real and imaginary fields. @@ -140,11 +144,13 @@ class InputsPhaseFc(_Inputs): def __init__(self, op: Operator): super().__init__(phase_fc._spec().inputs, op) - self._fields_container = Input(phase_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + phase_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
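# Usage sketch (illustration only; real_field and imag_field are placeholders,
# and mapping fieldA/fieldB to the real and imaginary parts is assumed from
# the operator description): math.phase returns the phase in radians as a
# typed Output[Field].
from ansys.dpf import core as dpf

op = dpf.operators.math.phase()
op.inputs.fieldA.connect(real_field)  # Input[Field | FieldsContainer]
op.inputs.fieldB.connect(imag_field)  # Input[Field | FieldsContainer]
phase_rad = op.outputs.field()        # Output[Field]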
Returns @@ -177,11 +183,13 @@ class OutputsPhaseFc(_Outputs): def __init__(self, op: Operator): super().__init__(phase_fc._spec().outputs, op) - self._fields_container = Output(phase_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + phase_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/polar_to_cplx.py b/src/ansys/dpf/core/operators/math/polar_to_cplx.py index 7c555c880bb..0cb953ef13d 100644 --- a/src/ansys/dpf/core/operators/math/polar_to_cplx.py +++ b/src/ansys/dpf/core/operators/math/polar_to_cplx.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class polar_to_cplx(Operator): r"""Converts a complex number from polar form to complex. @@ -140,11 +144,13 @@ class InputsPolarToCplx(_Inputs): def __init__(self, op: Operator): super().__init__(polar_to_cplx._spec().inputs, op) - self._fields_container = Input(polar_to_cplx._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + polar_to_cplx._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -177,11 +183,13 @@ class OutputsPolarToCplx(_Outputs): def __init__(self, op: Operator): super().__init__(polar_to_cplx._spec().outputs, op) - self._fields_container = Output(polar_to_cplx._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + polar_to_cplx._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/pow.py b/src/ansys/dpf/core/operators/math/pow.py index 8f56a30434c..bc75e865d4a 100644 --- a/src/ansys/dpf/core/operators/math/pow.py +++ b/src/ansys/dpf/core/operators/math/pow.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class pow(Operator): r"""Computes element-wise field[i]^p. 
@@ -154,13 +158,13 @@ class InputsPow(_Inputs): def __init__(self, op: Operator): super().__init__(pow._spec().inputs, op) - self._field = Input(pow._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input(pow._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._factor = Input(pow._spec().input_pin(1), 1, op, -1) + self._factor: Input[float] = Input(pow._spec().input_pin(1), 1, op, -1) self._inputs.append(self._factor) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -179,7 +183,7 @@ def field(self) -> Input: return self._field @property - def factor(self) -> Input: + def factor(self) -> Input[float]: r"""Allows to connect factor input to the operator. Returns @@ -212,11 +216,11 @@ class OutputsPow(_Outputs): def __init__(self, op: Operator): super().__init__(pow._spec().outputs, op) - self._field = Output(pow._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(pow._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/pow_fc.py b/src/ansys/dpf/core/operators/math/pow_fc.py index 0d30bad2ff2..87c131fa2fe 100644 --- a/src/ansys/dpf/core/operators/math/pow_fc.py +++ b/src/ansys/dpf/core/operators/math/pow_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class pow_fc(Operator): r"""Computes element-wise field[i]^p. @@ -154,13 +158,15 @@ class InputsPowFc(_Inputs): def __init__(self, op: Operator): super().__init__(pow_fc._spec().inputs, op) - self._fields_container = Input(pow_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + pow_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._factor = Input(pow_fc._spec().input_pin(1), 1, op, -1) + self._factor: Input[float] = Input(pow_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._factor) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -179,7 +185,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def factor(self) -> Input: + def factor(self) -> Input[float]: r"""Allows to connect factor input to the operator. 
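# Usage sketch (illustration only; temperature_field is a placeholder Field):
# math.pow computes field[i]**p element-wise, with the exponent pin now typed
# Input[float].
from ansys.dpf import core as dpf

op = dpf.operators.math.pow()
op.inputs.field.connect(temperature_field)  # Input[Field]
op.inputs.factor.connect(2.0)               # Input[float], the exponent p
squared = op.outputs.field()                # Output[Field]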
Returns @@ -212,11 +218,13 @@ class OutputsPowFc(_Outputs): def __init__(self, op: Operator): super().__init__(pow_fc._spec().outputs, op) - self._fields_container = Output(pow_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + pow_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/qr_solve.py b/src/ansys/dpf/core/operators/math/qr_solve.py index 3fb306b0185..35c553c8fc4 100644 --- a/src/ansys/dpf/core/operators/math/qr_solve.py +++ b/src/ansys/dpf/core/operators/math/qr_solve.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class qr_solve(Operator): r"""Computes the solution using QR factorization. @@ -156,13 +160,17 @@ class InputsQrSolve(_Inputs): def __init__(self, op: Operator): super().__init__(qr_solve._spec().inputs, op) - self._fields_container = Input(qr_solve._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + qr_solve._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._rhs = Input(qr_solve._spec().input_pin(1), 1, op, -1) + self._rhs: Input[FieldsContainer] = Input( + qr_solve._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._rhs) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. fields_container @@ -183,7 +191,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def rhs(self) -> Input: + def rhs(self) -> Input[FieldsContainer]: r"""Allows to connect rhs input to the operator. 
fields_container @@ -218,11 +226,13 @@ class OutputsQrSolve(_Outputs): def __init__(self, op: Operator): super().__init__(qr_solve._spec().outputs, op) - self._fields_container = Output(qr_solve._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + qr_solve._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/real_part.py b/src/ansys/dpf/core/operators/math/real_part.py index 81097b050b9..654ed9720e6 100644 --- a/src/ansys/dpf/core/operators/math/real_part.py +++ b/src/ansys/dpf/core/operators/math/real_part.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class real_part(Operator): r"""Extracts element-wise real part of field containers containing complex @@ -142,11 +146,13 @@ class InputsRealPart(_Inputs): def __init__(self, op: Operator): super().__init__(real_part._spec().inputs, op) - self._fields_container = Input(real_part._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + real_part._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
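# Usage sketch (illustration only; matrix_fc and rhs_fc are placeholder
# FieldsContainer objects): math.qr_solve solves the system via QR
# factorization, with every pin, including the solution, typed as
# FieldsContainer.
from ansys.dpf import core as dpf

op = dpf.operators.math.qr_solve()
op.inputs.fields_container.connect(matrix_fc)  # Input[FieldsContainer]
op.inputs.rhs.connect(rhs_fc)                  # Input[FieldsContainer]
solution = op.outputs.fields_container()       # Output[FieldsContainer]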
Returns @@ -179,11 +185,13 @@ class OutputsRealPart(_Outputs): def __init__(self, op: Operator): super().__init__(real_part._spec().outputs, op) - self._fields_container = Output(real_part._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + real_part._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/relative_error.py b/src/ansys/dpf/core/operators/math/relative_error.py index b18b0f3da27..866f1f72903 100644 --- a/src/ansys/dpf/core/operators/math/relative_error.py +++ b/src/ansys/dpf/core/operators/math/relative_error.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class relative_error(Operator): r"""Computes the relative error between a reference scalar field and another @@ -186,13 +192,17 @@ class InputsRelativeError(_Inputs): def __init__(self, op: Operator): super().__init__(relative_error._spec().inputs, op) - self._value = Input(relative_error._spec().input_pin(0), 0, op, -1) + self._value: Input[Field | FieldsContainer | float] = Input( + relative_error._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._value) - self._reference = Input(relative_error._spec().input_pin(1), 1, op, -1) + self._reference: Input[Field | FieldsContainer | float] = Input( + relative_error._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._reference) @property - def value(self) -> Input: + def value(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect value input to the operator. field or fields container with only one field is expected @@ -213,7 +223,7 @@ def value(self) -> Input: return self._value @property - def reference(self) -> Input: + def reference(self) -> Input[Field | FieldsContainer | float]: r"""Allows to connect reference input to the operator. 
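A minimal call-site sketch for the typed relative_error pins above, assuming a running DPF server and two caller-provided scalar fields (`computed` and `reference` are illustrative names); the Output[Field] and Output[Scoping] pins it reads appear in the hunks that follow.

    from ansys.dpf.core import operators as ops

    err_op = ops.math.relative_error()
    err_op.inputs.value.connect(computed)        # Input[Field | FieldsContainer | float]
    err_op.inputs.reference.connect(reference)   # a plain float is also accepted
    error_field = err_op.outputs.field()                 # Output[Field]
    zero_ids = err_op.outputs.zero_ref_scoping().ids     # Output[Scoping]: entities with a zero reference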
field or fields container with only one field is expected @@ -250,15 +260,19 @@ class OutputsRelativeError(_Outputs): def __init__(self, op: Operator): super().__init__(relative_error._spec().outputs, op) - self._field = Output(relative_error._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(relative_error._spec().output_pin(0), 0, op) self._outputs.append(self._field) - self._zero_ref_scoping = Output(relative_error._spec().output_pin(1), 1, op) + self._zero_ref_scoping: Output[Scoping] = Output( + relative_error._spec().output_pin(1), 1, op + ) self._outputs.append(self._zero_ref_scoping) - self._no_ref_scoping = Output(relative_error._spec().output_pin(2), 2, op) + self._no_ref_scoping: Output[Scoping] = Output( + relative_error._spec().output_pin(2), 2, op + ) self._outputs.append(self._no_ref_scoping) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns @@ -276,7 +290,7 @@ def field(self) -> Output: return self._field @property - def zero_ref_scoping(self) -> Output: + def zero_ref_scoping(self) -> Output[Scoping]: r"""Allows to get zero_ref_scoping output of the operator Ids of entities where reference value is zero. @@ -296,7 +310,7 @@ def zero_ref_scoping(self) -> Output: return self._zero_ref_scoping @property - def no_ref_scoping(self) -> Output: + def no_ref_scoping(self) -> Output[Scoping]: r"""Allows to get no_ref_scoping output of the operator Ids of entities where there are no reference value. diff --git a/src/ansys/dpf/core/operators/math/scale.py b/src/ansys/dpf/core/operators/math/scale.py index 4c95bc1a052..5da8d056ca7 100644 --- a/src/ansys/dpf/core/operators/math/scale.py +++ b/src/ansys/dpf/core/operators/math/scale.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class scale(Operator): r"""Scales a field by a constant factor. This factor can be a scalar or a @@ -209,17 +214,21 @@ class InputsScale(_Inputs): def __init__(self, op: Operator): super().__init__(scale._spec().inputs, op) - self._field = Input(scale._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + scale._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._weights = Input(scale._spec().input_pin(1), 1, op, -1) + self._weights: Input[float | Field] = Input( + scale._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._weights) - self._boolean = Input(scale._spec().input_pin(2), 2, op, -1) + self._boolean: Input[bool] = Input(scale._spec().input_pin(2), 2, op, -1) self._inputs.append(self._boolean) - self._algorithm = Input(scale._spec().input_pin(3), 3, op, -1) + self._algorithm: Input[int] = Input(scale._spec().input_pin(3), 3, op, -1) self._inputs.append(self._algorithm) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -240,7 +249,7 @@ def field(self) -> Input: return self._field @property - def weights(self) -> Input: + def weights(self) -> Input[float | Field]: r"""Allows to connect weights input to the operator. 
Double/Field/Vector of doubles. When scoped on overall, same value(s) applied on all the data, when scoped elsewhere, corresponding values will be multiplied due to the scoping @@ -261,7 +270,7 @@ def weights(self) -> Input: return self._weights @property - def boolean(self) -> Input: + def boolean(self) -> Input[bool]: r"""Allows to connect boolean input to the operator. Default is false. If set to true, output of scale is made dimensionless @@ -282,7 +291,7 @@ def boolean(self) -> Input: return self._boolean @property - def algorithm(self) -> Input: + def algorithm(self) -> Input[int]: r"""Allows to connect algorithm input to the operator. Default is 0 use mkl. If set to 1, don't @@ -329,11 +338,11 @@ class OutputsScale(_Outputs): def __init__(self, op: Operator): super().__init__(scale._spec().outputs, op) - self._field = Output(scale._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(scale._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/scale_by_field.py b/src/ansys/dpf/core/operators/math/scale_by_field.py index 928d66ea814..b856b466244 100644 --- a/src/ansys/dpf/core/operators/math/scale_by_field.py +++ b/src/ansys/dpf/core/operators/math/scale_by_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class scale_by_field(Operator): r"""DEPRECATED, PLEASE USE SCALE. Scales a field (in 0) by a scalar field @@ -160,13 +165,17 @@ class InputsScaleByField(_Inputs): def __init__(self, op: Operator): super().__init__(scale_by_field._spec().inputs, op) - self._fieldA = Input(scale_by_field._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field | FieldsContainer] = Input( + scale_by_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(scale_by_field._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field | FieldsContainer] = Input( + scale_by_field._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldA input to the operator. field or fields container with only one field is expected @@ -187,7 +196,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fieldB input to the operator. 
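With the annotations above, the scale pins read as Input[Field | FieldsContainer], Input[float | Field], Input[bool], and Input[int] at the call site. A short sketch, assuming a running DPF server and an existing `stress_field` (illustrative name); the keyword-argument form follows the pattern of the generated docstrings.

    from ansys.dpf.core import operators as ops

    scale_op = ops.math.scale()
    scale_op.inputs.field.connect(stress_field)   # Input[Field | FieldsContainer]
    scale_op.inputs.weights.connect(2.0)          # Input[float | Field]: constant factor here
    scale_op.inputs.boolean.connect(False)        # Input[bool]: keep the original unit
    scaled = scale_op.outputs.field()             # Output[Field]

    # Equivalent construction with the same pins passed as keyword arguments:
    scaled = ops.math.scale(field=stress_field, weights=2.0).outputs.field()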
field or fields container with only one field is expected @@ -222,11 +231,11 @@ class OutputsScaleByField(_Outputs): def __init__(self, op: Operator): super().__init__(scale_by_field._spec().outputs, op) - self._field = Output(scale_by_field._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(scale_by_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/scale_by_field_fc.py b/src/ansys/dpf/core/operators/math/scale_by_field_fc.py index a31a6039167..a91dcbe40f5 100644 --- a/src/ansys/dpf/core/operators/math/scale_by_field_fc.py +++ b/src/ansys/dpf/core/operators/math/scale_by_field_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class scale_by_field_fc(Operator): r"""DEPRECATED, PLEASE USE SCALE. Scales a field (in 0) by a scalar field @@ -166,17 +171,17 @@ class InputsScaleByFieldFc(_Inputs): def __init__(self, op: Operator): super().__init__(scale_by_field_fc._spec().inputs, op) - self._field_or_fields_container_A = Input( + self._field_or_fields_container_A: Input[Field | FieldsContainer] = Input( scale_by_field_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._field_or_fields_container_A) - self._field_or_fields_container_B = Input( + self._field_or_fields_container_B: Input[Field | FieldsContainer] = Input( scale_by_field_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._field_or_fields_container_B) @property - def field_or_fields_container_A(self) -> Input: + def field_or_fields_container_A(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field_or_fields_container_A input to the operator. field or fields container with only one field is expected @@ -197,7 +202,7 @@ def field_or_fields_container_A(self) -> Input: return self._field_or_fields_container_A @property - def field_or_fields_container_B(self) -> Input: + def field_or_fields_container_B(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field_or_fields_container_B input to the operator. 
field or fields container with only one field is expected @@ -232,11 +237,13 @@ class OutputsScaleByFieldFc(_Outputs): def __init__(self, op: Operator): super().__init__(scale_by_field_fc._spec().outputs, op) - self._fields_container = Output(scale_by_field_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + scale_by_field_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/scale_fc.py b/src/ansys/dpf/core/operators/math/scale_fc.py index 7dac236c4e8..12d91704184 100644 --- a/src/ansys/dpf/core/operators/math/scale_fc.py +++ b/src/ansys/dpf/core/operators/math/scale_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class scale_fc(Operator): r"""Scales a fields container by a constant factor. This factor can be a @@ -214,17 +219,21 @@ class InputsScaleFc(_Inputs): def __init__(self, op: Operator): super().__init__(scale_fc._spec().inputs, op) - self._fields_container = Input(scale_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + scale_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._weights = Input(scale_fc._spec().input_pin(1), 1, op, -1) + self._weights: Input[float | Field | FieldsContainer] = Input( + scale_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._weights) - self._boolean = Input(scale_fc._spec().input_pin(2), 2, op, -1) + self._boolean: Input[bool] = Input(scale_fc._spec().input_pin(2), 2, op, -1) self._inputs.append(self._boolean) - self._algorithm = Input(scale_fc._spec().input_pin(3), 3, op, -1) + self._algorithm: Input[int] = Input(scale_fc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._algorithm) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. fields container to be scaled @@ -245,7 +254,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def weights(self) -> Input: + def weights(self) -> Input[float | Field | FieldsContainer]: r"""Allows to connect weights input to the operator. Double/Vector of doubles/Field/FieldsContainer. When scoped on overall, same value(s) applied on all the data, when scoped elsewhere, corresponding values will be multiplied due to the scoping @@ -266,7 +275,7 @@ def weights(self) -> Input: return self._weights @property - def boolean(self) -> Input: + def boolean(self) -> Input[bool]: r"""Allows to connect boolean input to the operator. Default is false. If set to true, output of scale is made dimensionless @@ -287,7 +296,7 @@ def boolean(self) -> Input: return self._boolean @property - def algorithm(self) -> Input: + def algorithm(self) -> Input[int]: r"""Allows to connect algorithm input to the operator. Default is 0 use mkl. 
If set to 1, don't @@ -334,11 +343,13 @@ class OutputsScaleFc(_Outputs): def __init__(self, op: Operator): super().__init__(scale_fc._spec().outputs, op) - self._fields_container = Output(scale_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + scale_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/sin.py b/src/ansys/dpf/core/operators/math/sin.py index 3d4371f50fd..8b30ceee2a0 100644 --- a/src/ansys/dpf/core/operators/math/sin.py +++ b/src/ansys/dpf/core/operators/math/sin.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class sin(Operator): r"""Computes element-wise sin(field[i]). @@ -140,11 +144,11 @@ class InputsSin(_Inputs): def __init__(self, op: Operator): super().__init__(sin._spec().inputs, op) - self._field = Input(sin._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input(sin._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -177,11 +181,11 @@ class OutputsSin(_Outputs): def __init__(self, op: Operator): super().__init__(sin._spec().outputs, op) - self._field = Output(sin._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(sin._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/sin_fc.py b/src/ansys/dpf/core/operators/math/sin_fc.py index 7daa5a8b62f..13f1dccab39 100644 --- a/src/ansys/dpf/core/operators/math/sin_fc.py +++ b/src/ansys/dpf/core/operators/math/sin_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class sin_fc(Operator): r"""Computes element-wise sin(field[i]). @@ -140,11 +144,13 @@ class InputsSinFc(_Inputs): def __init__(self, op: Operator): super().__init__(sin_fc._spec().inputs, op) - self._fields_container = Input(sin_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + sin_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -177,11 +183,13 @@ class OutputsSinFc(_Outputs): def __init__(self, op: Operator): super().__init__(sin_fc._spec().outputs, op) - self._fields_container = Output(sin_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + sin_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/sqr.py b/src/ansys/dpf/core/operators/math/sqr.py index ff30cf6464c..5849c9dc1be 100644 --- a/src/ansys/dpf/core/operators/math/sqr.py +++ b/src/ansys/dpf/core/operators/math/sqr.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class sqr(Operator): r"""Computes element-wise field[i]^2. @@ -141,11 +146,13 @@ class InputsSqr(_Inputs): def __init__(self, op: Operator): super().__init__(sqr._spec().inputs, op) - self._field = Input(sqr._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + sqr._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -180,11 +187,11 @@ class OutputsSqr(_Outputs): def __init__(self, op: Operator): super().__init__(sqr._spec().outputs, op) - self._field = Output(sqr._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(sqr._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/sqr_fc.py b/src/ansys/dpf/core/operators/math/sqr_fc.py index d92d20ea515..e17eaa9f77e 100644 --- a/src/ansys/dpf/core/operators/math/sqr_fc.py +++ b/src/ansys/dpf/core/operators/math/sqr_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class sqr_fc(Operator): r"""Computes element-wise field[i]^2. @@ -141,11 +145,13 @@ class InputsSqrFc(_Inputs): def __init__(self, op: Operator): super().__init__(sqr_fc._spec().inputs, op) - self._fields_container = Input(sqr_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + sqr_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
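Because Output is now parameterized as well, chaining these element-wise operators is self-documenting: an Output[FieldsContainer] feeds an Input[FieldsContainer], and a static type checker can verify the connection. A sketch assuming a running DPF server and a transient `fields_container` (illustrative name):

    from ansys.dpf.core import operators as ops

    sin_op = ops.math.sin_fc()
    sin_op.inputs.fields_container.connect(fields_container)   # Input[FieldsContainer]

    sqr_op = ops.math.sqr_fc()
    # Output[FieldsContainer] -> Input[FieldsContainer]
    sqr_op.inputs.fields_container.connect(sin_op.outputs.fields_container)

    sin_squared = sqr_op.outputs.fields_container()             # element-wise sin(x)^2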
field or fields container with only one field is expected @@ -180,11 +186,13 @@ class OutputsSqrFc(_Outputs): def __init__(self, op: Operator): super().__init__(sqr_fc._spec().outputs, op) - self._fields_container = Output(sqr_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + sqr_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/sqrt.py b/src/ansys/dpf/core/operators/math/sqrt.py index cce91f5c010..9f46189be4c 100644 --- a/src/ansys/dpf/core/operators/math/sqrt.py +++ b/src/ansys/dpf/core/operators/math/sqrt.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class sqrt(Operator): r"""Computes element-wise sqrt(field1). @@ -141,11 +146,13 @@ class InputsSqrt(_Inputs): def __init__(self, op: Operator): super().__init__(sqrt._spec().inputs, op) - self._field = Input(sqrt._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + sqrt._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -180,11 +187,11 @@ class OutputsSqrt(_Outputs): def __init__(self, op: Operator): super().__init__(sqrt._spec().outputs, op) - self._field = Output(sqrt._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(sqrt._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/sqrt_fc.py b/src/ansys/dpf/core/operators/math/sqrt_fc.py index 02e220e286d..7b1a1b70f0e 100644 --- a/src/ansys/dpf/core/operators/math/sqrt_fc.py +++ b/src/ansys/dpf/core/operators/math/sqrt_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class sqrt_fc(Operator): r"""Computes element-wise sqrt(field1). @@ -141,11 +145,13 @@ class InputsSqrtFc(_Inputs): def __init__(self, op: Operator): super().__init__(sqrt_fc._spec().inputs, op) - self._fields_container = Input(sqrt_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + sqrt_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
field or fields container with only one field is expected @@ -180,11 +186,13 @@ class OutputsSqrtFc(_Outputs): def __init__(self, op: Operator): super().__init__(sqrt_fc._spec().outputs, op) - self._fields_container = Output(sqrt_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + sqrt_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/svd.py b/src/ansys/dpf/core/operators/math/svd.py index e60d8685194..cc0ed7234fe 100644 --- a/src/ansys/dpf/core/operators/math/svd.py +++ b/src/ansys/dpf/core/operators/math/svd.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class svd(Operator): r"""Computes the matrix singular value decomposition (SVD) for each field in @@ -162,11 +166,13 @@ class InputsSvd(_Inputs): def __init__(self, op: Operator): super().__init__(svd._spec().inputs, op) - self._fields_container = Input(svd._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + svd._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. fields_container @@ -203,15 +209,15 @@ class OutputsSvd(_Outputs): def __init__(self, op: Operator): super().__init__(svd._spec().outputs, op) - self._s_svd = Output(svd._spec().output_pin(0), 0, op) + self._s_svd: Output[FieldsContainer] = Output(svd._spec().output_pin(0), 0, op) self._outputs.append(self._s_svd) - self._u_svd = Output(svd._spec().output_pin(1), 1, op) + self._u_svd: Output[FieldsContainer] = Output(svd._spec().output_pin(1), 1, op) self._outputs.append(self._u_svd) - self._vt_svd = Output(svd._spec().output_pin(2), 2, op) + self._vt_svd: Output[FieldsContainer] = Output(svd._spec().output_pin(2), 2, op) self._outputs.append(self._vt_svd) @property - def s_svd(self) -> Output: + def s_svd(self) -> Output[FieldsContainer]: r"""Allows to get s_svd output of the operator Singular values of the input data, where A=U.S.Vt @@ -231,7 +237,7 @@ def s_svd(self) -> Output: return self._s_svd @property - def u_svd(self) -> Output: + def u_svd(self) -> Output[FieldsContainer]: r"""Allows to get u_svd output of the operator U of the input data, where A=U.S.Vt @@ -251,7 +257,7 @@ def u_svd(self) -> Output: return self._u_svd @property - def vt_svd(self) -> Output: + def vt_svd(self) -> Output[FieldsContainer]: r"""Allows to get vt_svd output of the operator Vt of the input data, where A=U.S.Vt diff --git a/src/ansys/dpf/core/operators/math/sweeping_phase.py b/src/ansys/dpf/core/operators/math/sweeping_phase.py index 23191b1de6b..b625010aea0 100644 --- a/src/ansys/dpf/core/operators/math/sweeping_phase.py +++ b/src/ansys/dpf/core/operators/math/sweeping_phase.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from 
ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class sweeping_phase(Operator): r"""Shifts the phase of a real and an imaginary field (in 0 and 1) of a @@ -230,23 +235,33 @@ class InputsSweepingPhase(_Inputs): def __init__(self, op: Operator): super().__init__(sweeping_phase._spec().inputs, op) - self._real_field = Input(sweeping_phase._spec().input_pin(0), 0, op, -1) + self._real_field: Input[Field | FieldsContainer] = Input( + sweeping_phase._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._real_field) - self._imaginary_field = Input(sweeping_phase._spec().input_pin(1), 1, op, -1) + self._imaginary_field: Input[Field | FieldsContainer] = Input( + sweeping_phase._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._imaginary_field) - self._angle = Input(sweeping_phase._spec().input_pin(2), 2, op, -1) + self._angle: Input[float] = Input( + sweeping_phase._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._angle) - self._unit_name = Input(sweeping_phase._spec().input_pin(3), 3, op, -1) + self._unit_name: Input[str] = Input( + sweeping_phase._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._unit_name) - self._abs_value = Input(sweeping_phase._spec().input_pin(4), 4, op, -1) + self._abs_value: Input[bool] = Input( + sweeping_phase._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._abs_value) - self._imaginary_part_null = Input( + self._imaginary_part_null: Input[bool] = Input( sweeping_phase._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._imaginary_part_null) @property - def real_field(self) -> Input: + def real_field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect real_field input to the operator. field or fields container with only one field is expected @@ -267,7 +282,7 @@ def real_field(self) -> Input: return self._real_field @property - def imaginary_field(self) -> Input: + def imaginary_field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect imaginary_field input to the operator. field or fields container with only one field is expected @@ -288,7 +303,7 @@ def imaginary_field(self) -> Input: return self._imaginary_field @property - def angle(self) -> Input: + def angle(self) -> Input[float]: r"""Allows to connect angle input to the operator. Returns @@ -307,7 +322,7 @@ def angle(self) -> Input: return self._angle @property - def unit_name(self) -> Input: + def unit_name(self) -> Input[str]: r"""Allows to connect unit_name input to the operator. String Unit. Supported values: "deg" or "rad". Default: "rad". @@ -328,7 +343,7 @@ def unit_name(self) -> Input: return self._unit_name @property - def abs_value(self) -> Input: + def abs_value(self) -> Input[bool]: r"""Allows to connect abs_value input to the operator. Returns @@ -347,7 +362,7 @@ def abs_value(self) -> Input: return self._abs_value @property - def imaginary_part_null(self) -> Input: + def imaginary_part_null(self) -> Input[bool]: r"""Allows to connect imaginary_part_null input to the operator. If the imaginary part field is empty and this pin is true, then the imaginary part is supposed to be 0 (default is false). 
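A sketch for the sweeping_phase pins typed above, assuming a running DPF server and caller-provided `real_field` and `imaginary_field` (illustrative names); the Output[Field] pin appears in the next hunk.

    from ansys.dpf.core import operators as ops

    sweep = ops.math.sweeping_phase()
    sweep.inputs.real_field.connect(real_field)            # Input[Field | FieldsContainer]
    sweep.inputs.imaginary_field.connect(imaginary_field)  # Input[Field | FieldsContainer]
    sweep.inputs.angle.connect(30.0)                       # Input[float]
    sweep.inputs.unit_name.connect("deg")                  # Input[str]: "deg" or "rad" (default "rad")
    shifted = sweep.outputs.field()                        # Output[Field]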
@@ -382,11 +397,11 @@ class OutputsSweepingPhase(_Outputs): def __init__(self, op: Operator): super().__init__(sweeping_phase._spec().outputs, op) - self._field = Output(sweeping_phase._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(sweeping_phase._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/sweeping_phase_fc.py b/src/ansys/dpf/core/operators/math/sweeping_phase_fc.py index d2d5022b298..17cf019d147 100644 --- a/src/ansys/dpf/core/operators/math/sweeping_phase_fc.py +++ b/src/ansys/dpf/core/operators/math/sweeping_phase_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class sweeping_phase_fc(Operator): r"""Shifts the phase of all the corresponding real and imaginary fields of a @@ -199,19 +203,25 @@ class InputsSweepingPhaseFc(_Inputs): def __init__(self, op: Operator): super().__init__(sweeping_phase_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( sweeping_phase_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._angle = Input(sweeping_phase_fc._spec().input_pin(2), 2, op, -1) + self._angle: Input[float] = Input( + sweeping_phase_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._angle) - self._unit_name = Input(sweeping_phase_fc._spec().input_pin(3), 3, op, -1) + self._unit_name: Input[str] = Input( + sweeping_phase_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._unit_name) - self._abs_value = Input(sweeping_phase_fc._spec().input_pin(4), 4, op, -1) + self._abs_value: Input[bool] = Input( + sweeping_phase_fc._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._abs_value) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -230,7 +240,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def angle(self) -> Input: + def angle(self) -> Input[float]: r"""Allows to connect angle input to the operator. Returns @@ -249,7 +259,7 @@ def angle(self) -> Input: return self._angle @property - def unit_name(self) -> Input: + def unit_name(self) -> Input[str]: r"""Allows to connect unit_name input to the operator. String Unit. Supported values: "deg" or "rad". Default: "rad". @@ -270,7 +280,7 @@ def unit_name(self) -> Input: return self._unit_name @property - def abs_value(self) -> Input: + def abs_value(self) -> Input[bool]: r"""Allows to connect abs_value input to the operator. 
Returns @@ -303,11 +313,13 @@ class OutputsSweepingPhaseFc(_Outputs): def __init__(self, op: Operator): super().__init__(sweeping_phase_fc._spec().outputs, op) - self._fields_container = Output(sweeping_phase_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + sweeping_phase_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/time_derivation.py b/src/ansys/dpf/core/operators/math/time_derivation.py index 63558325cff..46d683aed8d 100644 --- a/src/ansys/dpf/core/operators/math/time_derivation.py +++ b/src/ansys/dpf/core/operators/math/time_derivation.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class time_derivation(Operator): r"""Derives a field of time varying quantities with respect to time @@ -156,13 +160,17 @@ class InputsTimeDerivation(_Inputs): def __init__(self, op: Operator): super().__init__(time_derivation._spec().inputs, op) - self._field = Input(time_derivation._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input( + time_derivation._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._spline_fitting = Input(time_derivation._spec().input_pin(1), 1, op, -1) + self._spline_fitting: Input[bool] = Input( + time_derivation._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._spline_fitting) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. field @@ -183,7 +191,7 @@ def field(self) -> Input: return self._field @property - def spline_fitting(self) -> Input: + def spline_fitting(self) -> Input[bool]: r"""Allows to connect spline_fitting input to the operator. 
Uses spline fitting on the input field to compute smooth derivatives @@ -218,11 +226,13 @@ class OutputsTimeDerivation(_Outputs): def __init__(self, op: Operator): super().__init__(time_derivation._spec().outputs, op) - self._field = Output(time_derivation._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + time_derivation._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/time_freq_interpolation.py b/src/ansys/dpf/core/operators/math/time_freq_interpolation.py index 2dd7dac20da..2a08386d1fc 100644 --- a/src/ansys/dpf/core/operators/math/time_freq_interpolation.py +++ b/src/ansys/dpf/core/operators/math/time_freq_interpolation.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class time_freq_interpolation(Operator): r"""Interpolates between all the matching fields of a fields container at @@ -234,31 +240,33 @@ class InputsTimeFreqInterpolation(_Inputs): def __init__(self, op: Operator): super().__init__(time_freq_interpolation._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( time_freq_interpolation._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._time_freq_values = Input( + self._time_freq_values: Input[float | Field] = Input( time_freq_interpolation._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._time_freq_values) - self._step = Input(time_freq_interpolation._spec().input_pin(2), 2, op, -1) + self._step: Input[int] = Input( + time_freq_interpolation._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._step) - self._interpolation_type = Input( + self._interpolation_type: Input[int] = Input( time_freq_interpolation._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._interpolation_type) - self._force_new_time_freq_support = Input( + self._force_new_time_freq_support: Input[bool] = Input( time_freq_interpolation._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._force_new_time_freq_support) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( time_freq_interpolation._spec().input_pin(8), 8, op, -1 ) self._inputs.append(self._time_freq_support) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -277,7 +285,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def time_freq_values(self) -> Input: + def time_freq_values(self) -> Input[float | Field]: r"""Allows to connect time_freq_values input to the operator. list of frequencies or times needed. To specify load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". 
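The time_derivation pins above reduce to one Field and one bool; a sketch assuming a running DPF server and a time-varying `displacement_field` (illustrative name).

    from ansys.dpf.core import operators as ops

    deriv = ops.math.time_derivation()
    deriv.inputs.field.connect(displacement_field)   # Input[Field]: time-varying data
    deriv.inputs.spline_fitting.connect(True)        # Input[bool]: smooth derivatives via spline fit
    velocity = deriv.outputs.field()                  # Output[Field]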
@@ -298,7 +306,7 @@ def time_freq_values(self) -> Input: return self._time_freq_values @property - def step(self) -> Input: + def step(self) -> Input[int]: r"""Allows to connect step input to the operator. if a Field is set as input, the step ids should be its scoping. @@ -319,7 +327,7 @@ def step(self) -> Input: return self._step @property - def interpolation_type(self) -> Input: + def interpolation_type(self) -> Input[int]: r"""Allows to connect interpolation_type input to the operator. 1 is ramped, 2 is stepped, default is 1. @@ -340,7 +348,7 @@ def interpolation_type(self) -> Input: return self._interpolation_type @property - def force_new_time_freq_support(self) -> Input: + def force_new_time_freq_support(self) -> Input[bool]: r"""Allows to connect force_new_time_freq_support input to the operator. If set to true, the output fields container will always have a new time freq support rescoped to the output time_freq_values (default is false). If set to false, the time freq support is only recreated when time or frequency values are between existing ones. @@ -361,7 +369,7 @@ def force_new_time_freq_support(self) -> Input: return self._force_new_time_freq_support @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Returns @@ -394,13 +402,13 @@ class OutputsTimeFreqInterpolation(_Outputs): def __init__(self, op: Operator): super().__init__(time_freq_interpolation._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( time_freq_interpolation._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/time_integration.py b/src/ansys/dpf/core/operators/math/time_integration.py index 77bcad3444f..6d1a5fcf836 100644 --- a/src/ansys/dpf/core/operators/math/time_integration.py +++ b/src/ansys/dpf/core/operators/math/time_integration.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class time_integration(Operator): r"""Integrates a field of time varying quantities over time @@ -210,23 +214,29 @@ class InputsTimeIntegration(_Inputs): def __init__(self, op: Operator): super().__init__(time_integration._spec().inputs, op) - self._field = Input(time_integration._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input( + time_integration._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._resample_output = Input(time_integration._spec().input_pin(1), 1, op, -1) + self._resample_output: Input[bool] = Input( + time_integration._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._resample_output) - self._absolute_error = Input(time_integration._spec().input_pin(2), 2, op, -1) + self._absolute_error: Input[float] = Input( + time_integration._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._absolute_error) - self._minimum_step_size = Input( + self._minimum_step_size: Input[float] = Input( time_integration._spec().input_pin(3), 3, op, -1 ) 
self._inputs.append(self._minimum_step_size) - self._integration_constant = Input( + self._integration_constant: Input[float] = Input( time_integration._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._integration_constant) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. field @@ -247,7 +257,7 @@ def field(self) -> Input: return self._field @property - def resample_output(self) -> Input: + def resample_output(self) -> Input[bool]: r"""Allows to connect resample_output input to the operator. Resample the output @@ -268,7 +278,7 @@ def resample_output(self) -> Input: return self._resample_output @property - def absolute_error(self) -> Input: + def absolute_error(self) -> Input[float]: r"""Allows to connect absolute_error input to the operator. Absolute error for the resampling @@ -289,7 +299,7 @@ def absolute_error(self) -> Input: return self._absolute_error @property - def minimum_step_size(self) -> Input: + def minimum_step_size(self) -> Input[float]: r"""Allows to connect minimum_step_size input to the operator. Minimum time step size for the resamplig @@ -310,7 +320,7 @@ def minimum_step_size(self) -> Input: return self._minimum_step_size @property - def integration_constant(self) -> Input: + def integration_constant(self) -> Input[float]: r"""Allows to connect integration_constant input to the operator. Constant to be added to the integrated field @@ -345,11 +355,13 @@ class OutputsTimeIntegration(_Outputs): def __init__(self, op: Operator): super().__init__(time_integration._spec().outputs, op) - self._field = Output(time_integration._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + time_integration._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/unit_convert.py b/src/ansys/dpf/core/operators/math/unit_convert.py index 6aa740eed6a..0eb82ce0ed5 100644 --- a/src/ansys/dpf/core/operators/math/unit_convert.py +++ b/src/ansys/dpf/core/operators/math/unit_convert.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + class unit_convert(Operator): r"""Converts an input field/fields container or mesh of a given unit to @@ -171,13 +178,19 @@ class InputsUnitConvert(_Inputs): def __init__(self, op: Operator): super().__init__(unit_convert._spec().inputs, op) - self._entity_to_convert = Input(unit_convert._spec().input_pin(0), 0, op, -1) + self._entity_to_convert: Input[ + Field | FieldsContainer | MeshedRegion | MeshesContainer + ] = Input(unit_convert._spec().input_pin(0), 0, op, -1) self._inputs.append(self._entity_to_convert) - self._unit_name = Input(unit_convert._spec().input_pin(1), 1, op, -1) + self._unit_name: Input[str | int | Field] = Input( + unit_convert._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._unit_name) @property - def entity_to_convert(self) -> Input: + def entity_to_convert( + self, + ) -> Input[Field | 
FieldsContainer | MeshedRegion | MeshesContainer]: r"""Allows to connect entity_to_convert input to the operator. Returns @@ -196,7 +209,7 @@ def entity_to_convert(self) -> Input: return self._entity_to_convert @property - def unit_name(self) -> Input: + def unit_name(self) -> Input[str | int | Field]: r"""Allows to connect unit_name input to the operator. unit as a string, ex 'm' for meter, 'Pa' for pascal,... Or ansys unit system's ID, or a field from which expected unit will be extracted. diff --git a/src/ansys/dpf/core/operators/math/unit_convert_fc.py b/src/ansys/dpf/core/operators/math/unit_convert_fc.py index ee86af52599..cdfefa48493 100644 --- a/src/ansys/dpf/core/operators/math/unit_convert_fc.py +++ b/src/ansys/dpf/core/operators/math/unit_convert_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class unit_convert_fc(Operator): r"""Converts an input fields container of a given unit to another unit. @@ -155,13 +159,17 @@ class InputsUnitConvertFc(_Inputs): def __init__(self, op: Operator): super().__init__(unit_convert_fc._spec().inputs, op) - self._fields_container = Input(unit_convert_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + unit_convert_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._unit_name = Input(unit_convert_fc._spec().input_pin(1), 1, op, -1) + self._unit_name: Input[str] = Input( + unit_convert_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._unit_name) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -180,7 +188,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def unit_name(self) -> Input: + def unit_name(self) -> Input[str]: r"""Allows to connect unit_name input to the operator. unit as a string, ex 'm' for meter, 'Pa' for pascal,... 
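For the fields-container variant, unit_name narrows to Input[str]; a sketch assuming a running DPF server and a `fields_container` expressed in metres (illustrative setup), with the Output[FieldsContainer] pin shown in the next hunk.

    from ansys.dpf.core import operators as ops

    convert = ops.math.unit_convert_fc()
    convert.inputs.fields_container.connect(fields_container)  # Input[FieldsContainer]
    convert.inputs.unit_name.connect("mm")                      # Input[str], e.g. 'm', 'Pa', 'mm'
    converted = convert.outputs.fields_container()              # Output[FieldsContainer]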
@@ -215,11 +223,13 @@ class OutputsUnitConvertFc(_Outputs): def __init__(self, op: Operator): super().__init__(unit_convert_fc._spec().outputs, op) - self._fields_container = Output(unit_convert_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + unit_convert_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/window_bartlett.py b/src/ansys/dpf/core/operators/math/window_bartlett.py index ab0f7976208..e31d93f2961 100644 --- a/src/ansys/dpf/core/operators/math/window_bartlett.py +++ b/src/ansys/dpf/core/operators/math/window_bartlett.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class window_bartlett(Operator): r"""Apply bartlett windowing on a given FieldsContainer having time label or @@ -145,11 +150,13 @@ class InputsWindowBartlett(_Inputs): def __init__(self, op: Operator): super().__init__(window_bartlett._spec().inputs, op) - self._field = Input(window_bartlett._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + window_bartlett._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns diff --git a/src/ansys/dpf/core/operators/math/window_bartlett_fc.py b/src/ansys/dpf/core/operators/math/window_bartlett_fc.py index e46a4b2fea6..5ff5ff5bde3 100644 --- a/src/ansys/dpf/core/operators/math/window_bartlett_fc.py +++ b/src/ansys/dpf/core/operators/math/window_bartlett_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class window_bartlett_fc(Operator): r"""Apply bartlett windowing on a given FieldsContainer having time label or @@ -144,13 +148,13 @@ class InputsWindowBartlettFc(_Inputs): def __init__(self, op: Operator): super().__init__(window_bartlett_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( window_bartlett_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -183,11 +187,13 @@ class OutputsWindowBartlettFc(_Outputs): def __init__(self, op: Operator): super().__init__(window_bartlett_fc._spec().outputs, op) - self._fields_container = Output(window_bartlett_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + window_bartlett_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/window_blackman.py b/src/ansys/dpf/core/operators/math/window_blackman.py index d531162b87b..e3fa61b8ab4 100644 --- a/src/ansys/dpf/core/operators/math/window_blackman.py +++ b/src/ansys/dpf/core/operators/math/window_blackman.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class window_blackman(Operator): r"""Apply blackman windowing on a given FieldsContainer having time label or @@ -145,11 +150,13 @@ class InputsWindowBlackman(_Inputs): def __init__(self, op: Operator): super().__init__(window_blackman._spec().inputs, op) - self._field = Input(window_blackman._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + window_blackman._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns diff --git a/src/ansys/dpf/core/operators/math/window_blackman_fc.py b/src/ansys/dpf/core/operators/math/window_blackman_fc.py index 4eb75e6c532..ab23ac0270a 100644 --- a/src/ansys/dpf/core/operators/math/window_blackman_fc.py +++ b/src/ansys/dpf/core/operators/math/window_blackman_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class window_blackman_fc(Operator): r"""Apply blackman windowing on a given FieldsContainer having time label or @@ -144,13 +148,13 @@ class InputsWindowBlackmanFc(_Inputs): def __init__(self, op: Operator): super().__init__(window_blackman_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( window_blackman_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -183,11 +187,13 @@ class OutputsWindowBlackmanFc(_Outputs): def __init__(self, op: Operator): super().__init__(window_blackman_fc._spec().outputs, op) - self._fields_container = Output(window_blackman_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + window_blackman_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/window_hamming.py b/src/ansys/dpf/core/operators/math/window_hamming.py index 233957d1515..1223e992f39 100644 --- a/src/ansys/dpf/core/operators/math/window_hamming.py +++ b/src/ansys/dpf/core/operators/math/window_hamming.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class window_hamming(Operator): r"""Apply hamming windowing on a given FieldsContainer having time label or @@ -145,11 +150,13 @@ class InputsWindowHamming(_Inputs): def __init__(self, op: Operator): super().__init__(window_hamming._spec().inputs, op) - self._field = Input(window_hamming._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + window_hamming._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns diff --git a/src/ansys/dpf/core/operators/math/window_hamming_fc.py b/src/ansys/dpf/core/operators/math/window_hamming_fc.py index 36ab5bbb218..ddfb0e7b89d 100644 --- a/src/ansys/dpf/core/operators/math/window_hamming_fc.py +++ b/src/ansys/dpf/core/operators/math/window_hamming_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class window_hamming_fc(Operator): r"""Apply hamming windowing on a given FieldsContainer having time label or @@ -144,13 +148,13 @@ class InputsWindowHammingFc(_Inputs): def __init__(self, op: Operator): super().__init__(window_hamming_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( window_hamming_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -183,11 +187,13 @@ class OutputsWindowHammingFc(_Outputs): def __init__(self, op: Operator): super().__init__(window_hamming_fc._spec().outputs, op) - self._fields_container = Output(window_hamming_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + window_hamming_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/window_hanning.py b/src/ansys/dpf/core/operators/math/window_hanning.py index 5ba1a4830f3..14baddf140d 100644 --- a/src/ansys/dpf/core/operators/math/window_hanning.py +++ b/src/ansys/dpf/core/operators/math/window_hanning.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class window_hanning(Operator): r"""Apply hanning windowing on a given FieldsContainer having time label or @@ -145,11 +150,13 @@ class InputsWindowHanning(_Inputs): def __init__(self, op: Operator): super().__init__(window_hanning._spec().inputs, op) - self._field = Input(window_hanning._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + window_hanning._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns diff --git a/src/ansys/dpf/core/operators/math/window_hanning_fc.py b/src/ansys/dpf/core/operators/math/window_hanning_fc.py index 6c885aff12b..e7c338a2c1d 100644 --- a/src/ansys/dpf/core/operators/math/window_hanning_fc.py +++ b/src/ansys/dpf/core/operators/math/window_hanning_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class window_hanning_fc(Operator): r"""Apply hanning windowing on a given FieldsContainer having time label or @@ -144,13 +148,13 @@ class InputsWindowHanningFc(_Inputs): def __init__(self, op: Operator): super().__init__(window_hanning_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( window_hanning_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -183,11 +187,13 @@ class OutputsWindowHanningFc(_Outputs): def __init__(self, op: Operator): super().__init__(window_hanning_fc._spec().outputs, op) - self._fields_container = Output(window_hanning_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + window_hanning_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/window_triangular.py b/src/ansys/dpf/core/operators/math/window_triangular.py index 6d1b4ad54ea..cea549a147d 100644 --- a/src/ansys/dpf/core/operators/math/window_triangular.py +++ b/src/ansys/dpf/core/operators/math/window_triangular.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class window_triangular(Operator): r"""Apply triangular windowing on a given FieldsContainer having time label @@ -145,11 +150,13 @@ class InputsWindowTriangular(_Inputs): def __init__(self, op: Operator): super().__init__(window_triangular._spec().inputs, op) - self._field = Input(window_triangular._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + window_triangular._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns diff --git a/src/ansys/dpf/core/operators/math/window_triangular_fc.py b/src/ansys/dpf/core/operators/math/window_triangular_fc.py index e9d27f293a1..7f80ee411e0 100644 --- a/src/ansys/dpf/core/operators/math/window_triangular_fc.py +++ b/src/ansys/dpf/core/operators/math/window_triangular_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class window_triangular_fc(Operator): r"""Apply triangular windowing on a given FieldsContainer having time label @@ -144,13 +148,13 @@ class InputsWindowTriangularFc(_Inputs): def __init__(self, op: Operator): super().__init__(window_triangular_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( window_triangular_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -183,13 +187,13 @@ class OutputsWindowTriangularFc(_Outputs): def __init__(self, op: Operator): super().__init__(window_triangular_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( window_triangular_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/math/window_welch.py b/src/ansys/dpf/core/operators/math/window_welch.py index 98c7f504ae9..09ebbc69580 100644 --- a/src/ansys/dpf/core/operators/math/window_welch.py +++ b/src/ansys/dpf/core/operators/math/window_welch.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class window_welch(Operator): r"""Apply welch windowing on a given FieldsContainer having time label or a @@ -145,11 +150,13 @@ class InputsWindowWelch(_Inputs): def __init__(self, op: Operator): super().__init__(window_welch._spec().inputs, op) - self._field = Input(window_welch._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + window_welch._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns diff --git a/src/ansys/dpf/core/operators/math/window_welch_fc.py b/src/ansys/dpf/core/operators/math/window_welch_fc.py index 8ba72d89f8c..46c3b1b0eee 100644 --- a/src/ansys/dpf/core/operators/math/window_welch_fc.py +++ b/src/ansys/dpf/core/operators/math/window_welch_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class window_welch_fc(Operator): r"""Apply welch windowing on a given FieldsContainer having time label or a @@ -144,11 +148,13 @@ class InputsWindowWelchFc(_Inputs): def __init__(self, op: Operator): super().__init__(window_welch_fc._spec().inputs, op) - self._fields_container = Input(window_welch_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + window_welch_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -181,11 +187,13 @@ class OutputsWindowWelchFc(_Outputs): def __init__(self, op: Operator): super().__init__(window_welch_fc._spec().outputs, op) - self._fields_container = Output(window_welch_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + window_welch_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/acmo_mesh_provider.py b/src/ansys/dpf/core/operators/mesh/acmo_mesh_provider.py index 1dbf93965bf..de47d6f75a2 100644 --- a/src/ansys/dpf/core/operators/mesh/acmo_mesh_provider.py +++ b/src/ansys/dpf/core/operators/mesh/acmo_mesh_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshes_container import MeshesContainer + class acmo_mesh_provider(Operator): r"""Converts an Assembly Mesh into a DPF Meshes container @@ -154,9 +158,13 @@ class InputsAcmoMeshProvider(_Inputs): def __init__(self, op: Operator): super().__init__(acmo_mesh_provider._spec().inputs, op) - self._assembly_mesh = Input(acmo_mesh_provider._spec().input_pin(0), 0, op, -1) + self._assembly_mesh: Input = Input( + acmo_mesh_provider._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._assembly_mesh) - self._unit = Input(acmo_mesh_provider._spec().input_pin(1), 1, op, -1) + self._unit: Input[str] = Input( + acmo_mesh_provider._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._unit) @property @@ -179,7 +187,7 @@ def assembly_mesh(self) -> Input: return self._assembly_mesh @property - def unit(self) -> Input: + def unit(self) -> Input[str]: r"""Allows to connect unit input to the operator. 
Returns @@ -212,11 +220,13 @@ class OutputsAcmoMeshProvider(_Outputs): def __init__(self, op: Operator): super().__init__(acmo_mesh_provider._spec().outputs, op) - self._meshes_container = Output(acmo_mesh_provider._spec().output_pin(0), 0, op) + self._meshes_container: Output[MeshesContainer] = Output( + acmo_mesh_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._meshes_container) @property - def meshes_container(self) -> Output: + def meshes_container(self) -> Output[MeshesContainer]: r"""Allows to get meshes_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/beam_properties.py b/src/ansys/dpf/core/operators/mesh/beam_properties.py index 59e8c487c8e..77214aa555e 100644 --- a/src/ansys/dpf/core/operators/mesh/beam_properties.py +++ b/src/ansys/dpf/core/operators/mesh/beam_properties.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.streams_container import StreamsContainer + class beam_properties(Operator): r"""Reads the beam’s properties from the result files contained in the @@ -276,13 +283,17 @@ class InputsBeamProperties(_Inputs): def __init__(self, op: Operator): super().__init__(beam_properties._spec().inputs, op) - self._streams = Input(beam_properties._spec().input_pin(3), 3, op, -1) + self._streams: Input[StreamsContainer] = Input( + beam_properties._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams) - self._data_sources = Input(beam_properties._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + beam_properties._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. Result file container allowed to be kept open to cache data. @@ -303,7 +314,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Result file path container, used if no streams are set. 
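beam_properties reads its data either from an already-open StreamsContainer (pin 3) or, if no streams are set, from a DataSources (pin 4). A minimal sketch, with my_data_sources and my_streams_container as placeholders for objects created elsewhere:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
>>> # my_data_sources is a placeholder DataSources pointing at the result files
>>> op.inputs.data_sources.connect(my_data_sources)
>>> # or, when result files are already kept open in a StreamsContainer:
>>> # op.inputs.streams.connect(my_streams_container)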
@@ -351,45 +362,65 @@ class OutputsBeamProperties(_Outputs): def __init__(self, op: Operator): super().__init__(beam_properties._spec().outputs, op) - self._mesh_out = Output(beam_properties._spec().output_pin(0), 0, op) + self._mesh_out: Output[MeshedRegion] = Output( + beam_properties._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh_out) - self._field_type_section_id = Output( + self._field_type_section_id: Output[Field] = Output( beam_properties._spec().output_pin(1), 1, op ) self._outputs.append(self._field_type_section_id) - self._field_area = Output(beam_properties._spec().output_pin(2), 2, op) + self._field_area: Output[Field] = Output( + beam_properties._spec().output_pin(2), 2, op + ) self._outputs.append(self._field_area) - self._field_moment_inertia = Output( + self._field_moment_inertia: Output[Field] = Output( beam_properties._spec().output_pin(3), 3, op ) self._outputs.append(self._field_moment_inertia) - self._field_geometry = Output(beam_properties._spec().output_pin(4), 4, op) + self._field_geometry: Output[Field] = Output( + beam_properties._spec().output_pin(4), 4, op + ) self._outputs.append(self._field_geometry) - self._field_young_modulus = Output(beam_properties._spec().output_pin(5), 5, op) + self._field_young_modulus: Output[Field] = Output( + beam_properties._spec().output_pin(5), 5, op + ) self._outputs.append(self._field_young_modulus) - self._field_poisson_ratio = Output(beam_properties._spec().output_pin(6), 6, op) + self._field_poisson_ratio: Output[Field] = Output( + beam_properties._spec().output_pin(6), 6, op + ) self._outputs.append(self._field_poisson_ratio) - self._field_shear_modulus = Output(beam_properties._spec().output_pin(7), 7, op) + self._field_shear_modulus: Output[Field] = Output( + beam_properties._spec().output_pin(7), 7, op + ) self._outputs.append(self._field_shear_modulus) - self._field_beam_length = Output(beam_properties._spec().output_pin(8), 8, op) + self._field_beam_length: Output[Field] = Output( + beam_properties._spec().output_pin(8), 8, op + ) self._outputs.append(self._field_beam_length) - self._field_torsion_constant = Output( + self._field_torsion_constant: Output[Field] = Output( beam_properties._spec().output_pin(9), 9, op ) self._outputs.append(self._field_torsion_constant) - self._field_warping_constant = Output( + self._field_warping_constant: Output[Field] = Output( beam_properties._spec().output_pin(10), 10, op ) self._outputs.append(self._field_warping_constant) - self._field_offset_type = Output(beam_properties._spec().output_pin(11), 11, op) + self._field_offset_type: Output[Field] = Output( + beam_properties._spec().output_pin(11), 11, op + ) self._outputs.append(self._field_offset_type) - self._field_offset_y = Output(beam_properties._spec().output_pin(12), 12, op) + self._field_offset_y: Output[Field] = Output( + beam_properties._spec().output_pin(12), 12, op + ) self._outputs.append(self._field_offset_y) - self._field_offset_z = Output(beam_properties._spec().output_pin(13), 13, op) + self._field_offset_z: Output[Field] = Output( + beam_properties._spec().output_pin(13), 13, op + ) self._outputs.append(self._field_offset_z) @property - def mesh_out(self) -> Output: + def mesh_out(self) -> Output[MeshedRegion]: r"""Allows to get mesh_out output of the operator This mesh updates a new map containing a field of the beam's properties if there is at least one beam in mesh. 
@@ -409,7 +440,7 @@ def mesh_out(self) -> Output: return self._mesh_out @property - def field_type_section_id(self) -> Output: + def field_type_section_id(self) -> Output[Field]: r"""Allows to get field_type_section_id output of the operator This field contains the section ID of beams. 1:REC; 3:CSOLID, 4:CTUBE, 5:CHAN, 6:Z, 7:L, 8:I, 9:T, 11:HATS, 12:HREC. @@ -429,7 +460,7 @@ def field_type_section_id(self) -> Output: return self._field_type_section_id @property - def field_area(self) -> Output: + def field_area(self) -> Output[Field]: r"""Allows to get field_area output of the operator This field contains the area of beams. @@ -449,7 +480,7 @@ def field_area(self) -> Output: return self._field_area @property - def field_moment_inertia(self) -> Output: + def field_moment_inertia(self) -> Output[Field]: r"""Allows to get field_moment_inertia output of the operator This field contains the inertia moment of beams. Iyy, Iyz, Izz. @@ -469,7 +500,7 @@ def field_moment_inertia(self) -> Output: return self._field_moment_inertia @property - def field_geometry(self) -> Output: + def field_geometry(self) -> Output[Field]: r"""Allows to get field_geometry output of the operator This field contains the geometry of beams. REC:b,h. CSOLID:Ri. CTUBE:Ri, Re. CHAN:w1,w2,w3,t1,t2,t3. Z:w1,w2,w3,t1,t2,t3. L:w1,w2,t1,t2. I:w1,w2,w3,t1,t2,t3. T:w1,w2,t1,t2. HATS: w1,w2,w3,w4,t1,t2,t3,t4. HREC:w1,w2,t1,t2,t3,t4. @@ -489,7 +520,7 @@ def field_geometry(self) -> Output: return self._field_geometry @property - def field_young_modulus(self) -> Output: + def field_young_modulus(self) -> Output[Field]: r"""Allows to get field_young_modulus output of the operator This field contains the Young's modulus of beams. @@ -509,7 +540,7 @@ def field_young_modulus(self) -> Output: return self._field_young_modulus @property - def field_poisson_ratio(self) -> Output: + def field_poisson_ratio(self) -> Output[Field]: r"""Allows to get field_poisson_ratio output of the operator This field contains the Poisson's ratio of beams. @@ -529,7 +560,7 @@ def field_poisson_ratio(self) -> Output: return self._field_poisson_ratio @property - def field_shear_modulus(self) -> Output: + def field_shear_modulus(self) -> Output[Field]: r"""Allows to get field_shear_modulus output of the operator This field contains the Shear Modulus of beams. @@ -549,7 +580,7 @@ def field_shear_modulus(self) -> Output: return self._field_shear_modulus @property - def field_beam_length(self) -> Output: + def field_beam_length(self) -> Output[Field]: r"""Allows to get field_beam_length output of the operator This field contains the length of beams. @@ -569,7 +600,7 @@ def field_beam_length(self) -> Output: return self._field_beam_length @property - def field_torsion_constant(self) -> Output: + def field_torsion_constant(self) -> Output[Field]: r"""Allows to get field_torsion_constant output of the operator This field contains the Torsion Constant of beams. @@ -589,7 +620,7 @@ def field_torsion_constant(self) -> Output: return self._field_torsion_constant @property - def field_warping_constant(self) -> Output: + def field_warping_constant(self) -> Output[Field]: r"""Allows to get field_warping_constant output of the operator This field contains the Warping Constant of beams. 
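Once the operator runs, each typed output pin above returns its annotated type directly. Continuing the beam_properties sketch (placeholders as before):

>>> beams_mesh = op.outputs.mesh_out()                 # MeshedRegion
>>> section_ids = op.outputs.field_type_section_id()   # Field
>>> areas = op.outputs.field_area()                    # Field
>>> lengths = op.outputs.field_beam_length()           # Field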
@@ -609,7 +640,7 @@ def field_warping_constant(self) -> Output: return self._field_warping_constant @property - def field_offset_type(self) -> Output: + def field_offset_type(self) -> Output[Field]: r"""Allows to get field_offset_type output of the operator This field contains offset type of beams. @@ -629,7 +660,7 @@ def field_offset_type(self) -> Output: return self._field_offset_type @property - def field_offset_y(self) -> Output: + def field_offset_y(self) -> Output[Field]: r"""Allows to get field_offset_y output of the operator This field contains offset y of beams. @@ -649,7 +680,7 @@ def field_offset_y(self) -> Output: return self._field_offset_y @property - def field_offset_z(self) -> Output: + def field_offset_z(self) -> Output[Field]: r"""Allows to get field_offset_z output of the operator This field contains offset z of beams. diff --git a/src/ansys/dpf/core/operators/mesh/change_cs.py b/src/ansys/dpf/core/operators/mesh/change_cs.py index bcd490a5985..6bafc676703 100644 --- a/src/ansys/dpf/core/operators/mesh/change_cs.py +++ b/src/ansys/dpf/core/operators/mesh/change_cs.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + class change_cs(Operator): r"""Applies a transformation (rotation and displacement) matrix on a mesh or @@ -158,13 +164,17 @@ class InputsChangeCs(_Inputs): def __init__(self, op: Operator): super().__init__(change_cs._spec().inputs, op) - self._meshes = Input(change_cs._spec().input_pin(0), 0, op, -1) + self._meshes: Input[MeshedRegion | MeshesContainer] = Input( + change_cs._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._meshes) - self._coordinate_system = Input(change_cs._spec().input_pin(1), 1, op, -1) + self._coordinate_system: Input[Field] = Input( + change_cs._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._coordinate_system) @property - def meshes(self) -> Input: + def meshes(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect meshes input to the operator. Returns @@ -183,7 +193,7 @@ def meshes(self) -> Input: return self._meshes @property - def coordinate_system(self) -> Input: + def coordinate_system(self) -> Input[Field]: r"""Allows to connect coordinate_system input to the operator. 3-3 rotation matrix + 3 translations (X, Y, Z) diff --git a/src/ansys/dpf/core/operators/mesh/combine_levelset.py b/src/ansys/dpf/core/operators/mesh/combine_levelset.py index 1c458e6f38e..0f8a806637e 100644 --- a/src/ansys/dpf/core/operators/mesh/combine_levelset.py +++ b/src/ansys/dpf/core/operators/mesh/combine_levelset.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class combine_levelset(Operator): r"""Takes two level sets and computes their binary union. 
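change_cs takes a mesh or meshes container on pin 0 and, on pin 1, a Field carrying the transformation described in the coordinate_system docstring (a 3-3 rotation matrix plus the X, Y, Z translations). A minimal sketch where my_mesh and cs_field are placeholders; since the output pin is not shown in this excerpt, the result is retrieved through the operator's generic eval():

>>> op = dpf.operators.mesh.change_cs()
>>> op.inputs.meshes.connect(my_mesh)               # MeshedRegion or MeshesContainer
>>> op.inputs.coordinate_system.connect(cs_field)   # placeholder Field: rotation matrix + translations
>>> transformed = op.eval()  # output pin not shown here; eval() returns the first output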
@@ -154,13 +158,17 @@ class InputsCombineLevelset(_Inputs): def __init__(self, op: Operator): super().__init__(combine_levelset._spec().inputs, op) - self._fieldA = Input(combine_levelset._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field] = Input( + combine_levelset._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(combine_levelset._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field] = Input( + combine_levelset._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field]: r"""Allows to connect fieldA input to the operator. Returns @@ -179,7 +187,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field]: r"""Allows to connect fieldB input to the operator. Returns @@ -212,11 +220,13 @@ class OutputsCombineLevelset(_Outputs): def __init__(self, op: Operator): super().__init__(combine_levelset._spec().outputs, op) - self._field = Output(combine_levelset._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + combine_levelset._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/decimate_mesh.py b/src/ansys/dpf/core/operators/mesh/decimate_mesh.py index b3ac40e1381..bfd978729fd 100644 --- a/src/ansys/dpf/core/operators/mesh/decimate_mesh.py +++ b/src/ansys/dpf/core/operators/mesh/decimate_mesh.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + class decimate_mesh(Operator): r"""Decimate a meshed region @@ -179,15 +183,21 @@ class InputsDecimateMesh(_Inputs): def __init__(self, op: Operator): super().__init__(decimate_mesh._spec().inputs, op) - self._mesh = Input(decimate_mesh._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + decimate_mesh._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) - self._preservation_ratio = Input(decimate_mesh._spec().input_pin(1), 1, op, -1) + self._preservation_ratio: Input[float] = Input( + decimate_mesh._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._preservation_ratio) - self._aggressiveness = Input(decimate_mesh._spec().input_pin(2), 2, op, -1) + self._aggressiveness: Input[int] = Input( + decimate_mesh._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._aggressiveness) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh to decimate @@ -208,7 +218,7 @@ def mesh(self) -> Input: return self._mesh @property - def preservation_ratio(self) -> Input: + def preservation_ratio(self) -> Input[float]: r"""Allows to connect preservation_ratio input to the operator. Target ratio of elements to preserve, the actual number of elements preserved might differ. Default value is 0.5. @@ -229,7 +239,7 @@ def preservation_ratio(self) -> Input: return self._preservation_ratio @property - def aggressiveness(self) -> Input: + def aggressiveness(self) -> Input[int]: r"""Allows to connect aggressiveness input to the operator. 
Quality measure for the resulting decimated mesh. Lower aggresiveness will provide a higher quality mesh with the tradeoff of higher execution time. Value range is 0 to 150, default is 0. @@ -264,11 +274,13 @@ class OutputsDecimateMesh(_Outputs): def __init__(self, op: Operator): super().__init__(decimate_mesh._spec().outputs, op) - self._mesh = Output(decimate_mesh._spec().output_pin(0), 0, op) + self._mesh: Output[MeshedRegion] = Output( + decimate_mesh._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh) @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Decimated mesh with triangle elements diff --git a/src/ansys/dpf/core/operators/mesh/exclude_levelset.py b/src/ansys/dpf/core/operators/mesh/exclude_levelset.py index 30437eeb0e6..8d68b6fd476 100644 --- a/src/ansys/dpf/core/operators/mesh/exclude_levelset.py +++ b/src/ansys/dpf/core/operators/mesh/exclude_levelset.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class exclude_levelset(Operator): r"""Takes two level sets and excludes the second one from the first. @@ -154,13 +158,17 @@ class InputsExcludeLevelset(_Inputs): def __init__(self, op: Operator): super().__init__(exclude_levelset._spec().inputs, op) - self._fieldA = Input(exclude_levelset._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field] = Input( + exclude_levelset._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(exclude_levelset._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field] = Input( + exclude_levelset._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field]: r"""Allows to connect fieldA input to the operator. Returns @@ -179,7 +187,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field]: r"""Allows to connect fieldB input to the operator. 
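decimate_mesh only strictly needs the mesh on pin 0; preservation_ratio (float, default 0.5) and aggressiveness (int, 0 to 150, default 0) are optional tuning pins. A minimal sketch with my_mesh as a placeholder MeshedRegion:

>>> op = dpf.operators.mesh.decimate_mesh()
>>> op.inputs.mesh.connect(my_mesh)
>>> op.inputs.preservation_ratio.connect(0.3)  # keep roughly 30% of the elements
>>> op.inputs.aggressiveness.connect(0)        # 0 = highest quality, slowest
>>> decimated = op.outputs.mesh()              # MeshedRegion with triangle elements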
Returns @@ -212,11 +220,13 @@ class OutputsExcludeLevelset(_Outputs): def __init__(self, op: Operator): super().__init__(exclude_levelset._spec().outputs, op) - self._field = Output(exclude_levelset._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + exclude_levelset._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/external_layer.py b/src/ansys/dpf/core/operators/mesh/external_layer.py index e4f7536c517..6f7afd0f668 100644 --- a/src/ansys/dpf/core/operators/mesh/external_layer.py +++ b/src/ansys/dpf/core/operators/mesh/external_layer.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class external_layer(Operator): r"""Extracts the external layer (thick skin) of the mesh (3D elements) in a @@ -162,11 +167,13 @@ class InputsExternalLayer(_Inputs): def __init__(self, op: Operator): super().__init__(external_layer._spec().inputs, op) - self._mesh = Input(external_layer._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + external_layer._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -201,17 +208,21 @@ class OutputsExternalLayer(_Outputs): def __init__(self, op: Operator): super().__init__(external_layer._spec().outputs, op) - self._mesh = Output(external_layer._spec().output_pin(0), 0, op) + self._mesh: Output[MeshedRegion] = Output( + external_layer._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh) - self._nodes_mesh_scoping = Output(external_layer._spec().output_pin(1), 1, op) + self._nodes_mesh_scoping: Output[Scoping] = Output( + external_layer._spec().output_pin(1), 1, op + ) self._outputs.append(self._nodes_mesh_scoping) - self._elements_mesh_scoping = Output( + self._elements_mesh_scoping: Output[Scoping] = Output( external_layer._spec().output_pin(2), 2, op ) self._outputs.append(self._elements_mesh_scoping) @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Returns @@ -229,7 +240,7 @@ def mesh(self) -> Output: return self._mesh @property - def nodes_mesh_scoping(self) -> Output: + def nodes_mesh_scoping(self) -> Output[Scoping]: r"""Allows to get nodes_mesh_scoping output of the operator Returns @@ -247,7 +258,7 @@ def nodes_mesh_scoping(self) -> Output: return self._nodes_mesh_scoping @property - def elements_mesh_scoping(self) -> Output: + def elements_mesh_scoping(self) -> Output[Scoping]: r"""Allows to get elements_mesh_scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/from_field.py b/src/ansys/dpf/core/operators/mesh/from_field.py index f63ec5e19ef..38ec39fe106 100644 --- a/src/ansys/dpf/core/operators/mesh/from_field.py +++ b/src/ansys/dpf/core/operators/mesh/from_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator 
import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + class from_field(Operator): r"""Returns the meshed region contained in the support of the mesh. @@ -140,11 +145,11 @@ class InputsFromField(_Inputs): def __init__(self, op: Operator): super().__init__(from_field._spec().inputs, op) - self._field = Input(from_field._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input(from_field._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -177,11 +182,13 @@ class OutputsFromField(_Outputs): def __init__(self, op: Operator): super().__init__(from_field._spec().outputs, op) - self._mesh = Output(from_field._spec().output_pin(0), 0, op) + self._mesh: Output[MeshedRegion] = Output( + from_field._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh) @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/from_scoping.py b/src/ansys/dpf/core/operators/mesh/from_scoping.py index 290a531dc3b..7af8c3008fc 100644 --- a/src/ansys/dpf/core/operators/mesh/from_scoping.py +++ b/src/ansys/dpf/core/operators/mesh/from_scoping.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class from_scoping(Operator): r"""Extracts a meshed region from another meshed region based on a scoping. @@ -203,17 +208,25 @@ class InputsFromScoping(_Inputs): def __init__(self, op: Operator): super().__init__(from_scoping._spec().inputs, op) - self._scoping = Input(from_scoping._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + from_scoping._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._inclusive = Input(from_scoping._spec().input_pin(2), 2, op, -1) + self._inclusive: Input[int] = Input( + from_scoping._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._inclusive) - self._nodes_only = Input(from_scoping._spec().input_pin(3), 3, op, -1) + self._nodes_only: Input[bool] = Input( + from_scoping._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._nodes_only) - self._mesh = Input(from_scoping._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + from_scoping._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. if nodal/face scoping, then the scoping is transposed respecting the inclusive pin @@ -234,7 +247,7 @@ def scoping(self) -> Input: return self._scoping @property - def inclusive(self) -> Input: + def inclusive(self) -> Input[int]: r"""Allows to connect inclusive input to the operator. 
if inclusive == 1 then all the elements/faces adjacent to the nodes/faces ids in input are added, if inclusive == 0, only the elements/faces which have all their nodes/faces in the scoping are included @@ -255,7 +268,7 @@ def inclusive(self) -> Input: return self._inclusive @property - def nodes_only(self) -> Input: + def nodes_only(self) -> Input[bool]: r"""Allows to connect nodes_only input to the operator. returns mesh with nodes only (without any elements or property fields). Default is false. @@ -276,7 +289,7 @@ def nodes_only(self) -> Input: return self._nodes_only @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -309,11 +322,13 @@ class OutputsFromScoping(_Outputs): def __init__(self, op: Operator): super().__init__(from_scoping._spec().outputs, op) - self._mesh = Output(from_scoping._spec().output_pin(0), 0, op) + self._mesh: Output[MeshedRegion] = Output( + from_scoping._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh) @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/from_scopings.py b/src/ansys/dpf/core/operators/mesh/from_scopings.py index 5ebac9d24e6..8c38e0d468a 100644 --- a/src/ansys/dpf/core/operators/mesh/from_scopings.py +++ b/src/ansys/dpf/core/operators/mesh/from_scopings.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scopings_container import ScopingsContainer + class from_scopings(Operator): r"""Extracts multiple meshed region base on a scoping and saved in a @@ -195,17 +201,25 @@ class InputsFromScopings(_Inputs): def __init__(self, op: Operator): super().__init__(from_scopings._spec().inputs, op) - self._scopings_container = Input(from_scopings._spec().input_pin(1), 1, op, -1) + self._scopings_container: Input[ScopingsContainer] = Input( + from_scopings._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scopings_container) - self._inclusive = Input(from_scopings._spec().input_pin(2), 2, op, -1) + self._inclusive: Input[int] = Input( + from_scopings._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._inclusive) - self._nodes_only = Input(from_scopings._spec().input_pin(3), 3, op, -1) + self._nodes_only: Input[bool] = Input( + from_scopings._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._nodes_only) - self._mesh = Input(from_scopings._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + from_scopings._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def scopings_container(self) -> Input: + def scopings_container(self) -> Input[ScopingsContainer]: r"""Allows to connect scopings_container input to the operator. if nodal scoping, then the scoping is transposed respecting the inclusive pin @@ -226,7 +240,7 @@ def scopings_container(self) -> Input: return self._scopings_container @property - def inclusive(self) -> Input: + def inclusive(self) -> Input[int]: r"""Allows to connect inclusive input to the operator. 
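from_scoping (and its container variant from_scopings, whose pins mirror it) extracts a sub-mesh driven by a scoping; inclusive controls whether partially covered elements are kept, and nodes_only drops elements entirely. A minimal sketch with my_mesh and my_scoping as placeholders:

>>> op = dpf.operators.mesh.from_scoping()
>>> op.inputs.mesh.connect(my_mesh)
>>> op.inputs.scoping.connect(my_scoping)   # nodal or elemental Scoping
>>> op.inputs.inclusive.connect(1)          # keep elements touching any scoped node
>>> op.inputs.nodes_only.connect(False)
>>> sub_mesh = op.outputs.mesh()            # MeshedRegion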
if inclusive == 1 then all the elements/faces adjacent to the nodes/faces ids in input are added, if inclusive == 0, only the elements/faces which have all their nodes/faces in the scoping are included @@ -247,7 +261,7 @@ def inclusive(self) -> Input: return self._inclusive @property - def nodes_only(self) -> Input: + def nodes_only(self) -> Input[bool]: r"""Allows to connect nodes_only input to the operator. returns mesh with nodes only (without any elements). Default is false. @@ -268,7 +282,7 @@ def nodes_only(self) -> Input: return self._nodes_only @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -301,11 +315,13 @@ class OutputsFromScopings(_Outputs): def __init__(self, op: Operator): super().__init__(from_scopings._spec().outputs, op) - self._meshes = Output(from_scopings._spec().output_pin(0), 0, op) + self._meshes: Output[MeshesContainer] = Output( + from_scopings._spec().output_pin(0), 0, op + ) self._outputs.append(self._meshes) @property - def meshes(self) -> Output: + def meshes(self) -> Output[MeshesContainer]: r"""Allows to get meshes output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/iso_surfaces.py b/src/ansys/dpf/core/operators/mesh/iso_surfaces.py index 2ed7b9aa719..3f17f058444 100644 --- a/src/ansys/dpf/core/operators/mesh/iso_surfaces.py +++ b/src/ansys/dpf/core/operators/mesh/iso_surfaces.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + class iso_surfaces(Operator): r"""Extract multiple iso-contours from mesh_cut operator and set it into a @@ -226,19 +233,27 @@ class InputsIsoSurfaces(_Inputs): def __init__(self, op: Operator): super().__init__(iso_surfaces._spec().inputs, op) - self._field = Input(iso_surfaces._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input(iso_surfaces._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._num_surfaces = Input(iso_surfaces._spec().input_pin(1), 1, op, -1) + self._num_surfaces: Input[int] = Input( + iso_surfaces._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._num_surfaces) - self._mesh = Input(iso_surfaces._spec().input_pin(2), 2, op, -1) + self._mesh: Input[MeshedRegion] = Input( + iso_surfaces._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._mesh) - self._slice_surfaces = Input(iso_surfaces._spec().input_pin(3), 3, op, -1) + self._slice_surfaces: Input[bool] = Input( + iso_surfaces._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._slice_surfaces) - self._vector_iso_values = Input(iso_surfaces._spec().input_pin(4), 4, op, -1) + self._vector_iso_values: Input = Input( + iso_surfaces._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._vector_iso_values) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Field containing the values for the iso-surface computation. The mesh can be retrieved from this field's support or through pin 2. 
@@ -259,7 +274,7 @@ def field(self) -> Input: return self._field @property - def num_surfaces(self) -> Input: + def num_surfaces(self) -> Input[int]: r"""Allows to connect num_surfaces input to the operator. If provided, iso_values are linearly computed between the min and the max of the field of results. If not, iso_values must be provided by the user through pin 4 @@ -280,7 +295,7 @@ def num_surfaces(self) -> Input: return self._num_surfaces @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh to compute the iso-surface from. Used when not given through the support of the field in pin 0. @@ -301,7 +316,7 @@ def mesh(self) -> Input: return self._mesh @property - def slice_surfaces(self) -> Input: + def slice_surfaces(self) -> Input[bool]: r"""Allows to connect slice_surfaces input to the operator. True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true. @@ -358,13 +373,17 @@ class OutputsIsoSurfaces(_Outputs): def __init__(self, op: Operator): super().__init__(iso_surfaces._spec().outputs, op) - self._meshes = Output(iso_surfaces._spec().output_pin(0), 0, op) + self._meshes: Output[MeshesContainer] = Output( + iso_surfaces._spec().output_pin(0), 0, op + ) self._outputs.append(self._meshes) - self._fields_container = Output(iso_surfaces._spec().output_pin(1), 1, op) + self._fields_container: Output[FieldsContainer] = Output( + iso_surfaces._spec().output_pin(1), 1, op + ) self._outputs.append(self._fields_container) @property - def meshes(self) -> Output: + def meshes(self) -> Output[MeshesContainer]: r"""Allows to get meshes output of the operator Returns @@ -382,7 +401,7 @@ def meshes(self) -> Output: return self._meshes @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/make_plane_levelset.py b/src/ansys/dpf/core/operators/mesh/make_plane_levelset.py index 0cb1335a45e..89bb681f2e3 100644 --- a/src/ansys/dpf/core/operators/mesh/make_plane_levelset.py +++ b/src/ansys/dpf/core/operators/mesh/make_plane_levelset.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + class make_plane_levelset(Operator): r"""Computes the level set for a plane using coordinates. 
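iso_surfaces needs a field of values on pin 0 and either num_surfaces (pin 1), to spread iso values linearly between the field's min and max, or an explicit list of values on pin 4. A minimal sketch with my_field as a placeholder Field whose support carries the mesh:

>>> op = dpf.operators.mesh.iso_surfaces()
>>> op.inputs.field.connect(my_field)       # mesh taken from the field's support here
>>> op.inputs.num_surfaces.connect(5)       # 5 iso values between min and max of the field
>>> surfaces = op.outputs.meshes()          # MeshesContainer
>>> values = op.outputs.fields_container()  # FieldsContainer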
@@ -172,15 +177,21 @@ class InputsMakePlaneLevelset(_Inputs): def __init__(self, op: Operator): super().__init__(make_plane_levelset._spec().inputs, op) - self._coordinates = Input(make_plane_levelset._spec().input_pin(0), 0, op, -1) + self._coordinates: Input[MeshedRegion | Field] = Input( + make_plane_levelset._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._coordinates) - self._normal = Input(make_plane_levelset._spec().input_pin(1), 1, op, -1) + self._normal: Input[Field] = Input( + make_plane_levelset._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._normal) - self._origin = Input(make_plane_levelset._spec().input_pin(2), 2, op, -1) + self._origin: Input[Field] = Input( + make_plane_levelset._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._origin) @property - def coordinates(self) -> Input: + def coordinates(self) -> Input[MeshedRegion | Field]: r"""Allows to connect coordinates input to the operator. Returns @@ -199,7 +210,7 @@ def coordinates(self) -> Input: return self._coordinates @property - def normal(self) -> Input: + def normal(self) -> Input[Field]: r"""Allows to connect normal input to the operator. An overall 3D vector that gives the normal direction of the plane. @@ -220,7 +231,7 @@ def normal(self) -> Input: return self._normal @property - def origin(self) -> Input: + def origin(self) -> Input[Field]: r"""Allows to connect origin input to the operator. An overall 3d vector that gives a point of the plane. @@ -255,11 +266,13 @@ class OutputsMakePlaneLevelset(_Outputs): def __init__(self, op: Operator): super().__init__(make_plane_levelset._spec().outputs, op) - self._field = Output(make_plane_levelset._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + make_plane_levelset._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/make_sphere_levelset.py b/src/ansys/dpf/core/operators/mesh/make_sphere_levelset.py index 40ff9cbdfd3..02006da5834 100644 --- a/src/ansys/dpf/core/operators/mesh/make_sphere_levelset.py +++ b/src/ansys/dpf/core/operators/mesh/make_sphere_levelset.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + class make_sphere_levelset(Operator): r"""Computes the level set for a sphere using coordinates. 
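make_plane_levelset builds a level-set Field from coordinates (a MeshedRegion or a coordinates Field) plus two overall 3D vector Fields: the plane normal and a point on the plane. A minimal sketch where my_mesh, normal_field and origin_field are placeholders built elsewhere:

>>> op = dpf.operators.mesh.make_plane_levelset()
>>> op.inputs.coordinates.connect(my_mesh)   # MeshedRegion or coordinates Field
>>> op.inputs.normal.connect(normal_field)   # placeholder Field: overall 3D normal vector
>>> op.inputs.origin.connect(origin_field)   # placeholder Field: a point on the plane
>>> levelset = op.outputs.field()            # Field

make_sphere_levelset mirrors this, replacing the normal with a float radius on pin 2.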
@@ -172,15 +177,21 @@ class InputsMakeSphereLevelset(_Inputs): def __init__(self, op: Operator): super().__init__(make_sphere_levelset._spec().inputs, op) - self._coordinates = Input(make_sphere_levelset._spec().input_pin(0), 0, op, -1) + self._coordinates: Input[MeshedRegion | Field] = Input( + make_sphere_levelset._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._coordinates) - self._origin = Input(make_sphere_levelset._spec().input_pin(1), 1, op, -1) + self._origin: Input[Field] = Input( + make_sphere_levelset._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._origin) - self._radius = Input(make_sphere_levelset._spec().input_pin(2), 2, op, -1) + self._radius: Input[float] = Input( + make_sphere_levelset._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._radius) @property - def coordinates(self) -> Input: + def coordinates(self) -> Input[MeshedRegion | Field]: r"""Allows to connect coordinates input to the operator. Returns @@ -199,7 +210,7 @@ def coordinates(self) -> Input: return self._coordinates @property - def origin(self) -> Input: + def origin(self) -> Input[Field]: r"""Allows to connect origin input to the operator. An overall 3d vector that gives a point of the plane. @@ -220,7 +231,7 @@ def origin(self) -> Input: return self._origin @property - def radius(self) -> Input: + def radius(self) -> Input[float]: r"""Allows to connect radius input to the operator. Sphere radius. @@ -255,11 +266,13 @@ class OutputsMakeSphereLevelset(_Outputs): def __init__(self, op: Operator): super().__init__(make_sphere_levelset._spec().outputs, op) - self._field = Output(make_sphere_levelset._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + make_sphere_levelset._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/mesh_clip.py b/src/ansys/dpf/core/operators/mesh/mesh_clip.py index 2528ab1510c..f3faebf33bf 100644 --- a/src/ansys/dpf/core/operators/mesh/mesh_clip.py +++ b/src/ansys/dpf/core/operators/mesh/mesh_clip.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + class mesh_clip(Operator): r"""Clips a volume mesh along an iso value x, and construct the volume mesh @@ -218,19 +223,25 @@ class InputsMeshClip(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_clip._spec().inputs, op) - self._field = Input(mesh_clip._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input(mesh_clip._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._iso_value = Input(mesh_clip._spec().input_pin(1), 1, op, -1) + self._iso_value: Input[float] = Input(mesh_clip._spec().input_pin(1), 1, op, -1) self._inputs.append(self._iso_value) - self._closed_surface = Input(mesh_clip._spec().input_pin(2), 2, op, -1) + self._closed_surface: Input[int] = Input( + mesh_clip._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._closed_surface) - self._mesh = Input(mesh_clip._spec().input_pin(3), 3, op, -1) + self._mesh: Input[MeshedRegion] = Input( + mesh_clip._spec().input_pin(3), 3, op, -1 + ) 
self._inputs.append(self._mesh) - self._slice_surfaces = Input(mesh_clip._spec().input_pin(4), 4, op, -1) + self._slice_surfaces: Input[bool] = Input( + mesh_clip._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._slice_surfaces) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -249,7 +260,7 @@ def field(self) -> Input: return self._field @property - def iso_value(self) -> Input: + def iso_value(self) -> Input[float]: r"""Allows to connect iso_value input to the operator. iso value @@ -270,7 +281,7 @@ def iso_value(self) -> Input: return self._iso_value @property - def closed_surface(self) -> Input: + def closed_surface(self) -> Input[int]: r"""Allows to connect closed_surface input to the operator. 1: closed surface, 0: iso surface. @@ -291,7 +302,7 @@ def closed_surface(self) -> Input: return self._closed_surface @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -310,7 +321,7 @@ def mesh(self) -> Input: return self._mesh @property - def slice_surfaces(self) -> Input: + def slice_surfaces(self) -> Input[bool]: r"""Allows to connect slice_surfaces input to the operator. True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true. @@ -346,13 +357,15 @@ class OutputsMeshClip(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_clip._spec().outputs, op) - self._field = Output(mesh_clip._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(mesh_clip._spec().output_pin(0), 0, op) self._outputs.append(self._field) - self._mesh = Output(mesh_clip._spec().output_pin(2), 2, op) + self._mesh: Output[MeshedRegion] = Output( + mesh_clip._spec().output_pin(2), 2, op + ) self._outputs.append(self._mesh) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns @@ -370,7 +383,7 @@ def field(self) -> Output: return self._field @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/mesh_cut.py b/src/ansys/dpf/core/operators/mesh/mesh_cut.py index 2321986cec8..ded55c928dc 100644 --- a/src/ansys/dpf/core/operators/mesh/mesh_cut.py +++ b/src/ansys/dpf/core/operators/mesh/mesh_cut.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + class mesh_cut(Operator): r"""Extracts a skin of the mesh in triangles in a new meshed region. 
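mesh_clip clips along an iso value of a field; closed_surface switches between an iso surface (0) and a closed surface (1), and slice_surfaces decides whether shell and skin elements are sliced too. A minimal sketch with my_field as a placeholder Field, the mesh coming from its support or from pin 3:

>>> op = dpf.operators.mesh.mesh_clip()
>>> op.inputs.field.connect(my_field)
>>> op.inputs.iso_value.connect(0.0)
>>> op.inputs.closed_surface.connect(0)   # 0: iso surface, 1: closed surface
>>> clipped_mesh = op.outputs.mesh()      # MeshedRegion
>>> clipped_field = op.outputs.field()    # Field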
@@ -210,19 +215,25 @@ class InputsMeshCut(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_cut._spec().inputs, op) - self._field = Input(mesh_cut._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input(mesh_cut._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._iso_value = Input(mesh_cut._spec().input_pin(1), 1, op, -1) + self._iso_value: Input[float] = Input(mesh_cut._spec().input_pin(1), 1, op, -1) self._inputs.append(self._iso_value) - self._closed_surface = Input(mesh_cut._spec().input_pin(2), 2, op, -1) + self._closed_surface: Input[int] = Input( + mesh_cut._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._closed_surface) - self._mesh = Input(mesh_cut._spec().input_pin(3), 3, op, -1) + self._mesh: Input[MeshedRegion] = Input( + mesh_cut._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._mesh) - self._slice_surfaces = Input(mesh_cut._spec().input_pin(4), 4, op, -1) + self._slice_surfaces: Input[bool] = Input( + mesh_cut._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._slice_surfaces) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Field containing the values for the iso-surface computation. The mesh can be retrieved from this field's support or through pin 2. @@ -243,7 +254,7 @@ def field(self) -> Input: return self._field @property - def iso_value(self) -> Input: + def iso_value(self) -> Input[float]: r"""Allows to connect iso_value input to the operator. iso value @@ -264,7 +275,7 @@ def iso_value(self) -> Input: return self._iso_value @property - def closed_surface(self) -> Input: + def closed_surface(self) -> Input[int]: r"""Allows to connect closed_surface input to the operator. 1: closed surface, 0: iso surface. @@ -285,7 +296,7 @@ def closed_surface(self) -> Input: return self._closed_surface @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh to compute the iso-surface from. Used when not given through the support of the field in pin 0. @@ -306,7 +317,7 @@ def mesh(self) -> Input: return self._mesh @property - def slice_surfaces(self) -> Input: + def slice_surfaces(self) -> Input[bool]: r"""Allows to connect slice_surfaces input to the operator. True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true. 
@@ -341,11 +352,11 @@ class OutputsMeshCut(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_cut._spec().outputs, op) - self._mesh = Output(mesh_cut._spec().output_pin(2), 2, op) + self._mesh: Output[MeshedRegion] = Output(mesh_cut._spec().output_pin(2), 2, op) self._outputs.append(self._mesh) @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/mesh_extraction.py b/src/ansys/dpf/core/operators/mesh/mesh_extraction.py index ea411031acc..206cf1f5ae1 100644 --- a/src/ansys/dpf/core/operators/mesh/mesh_extraction.py +++ b/src/ansys/dpf/core/operators/mesh/mesh_extraction.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class mesh_extraction(Operator): r"""Take a mesh and a scoping (elemental or nodal) and create a new mesh @@ -173,15 +178,21 @@ class InputsMeshExtraction(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_extraction._spec().inputs, op) - self._mesh = Input(mesh_extraction._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + mesh_extraction._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) - self._mesh_scoping = Input(mesh_extraction._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + mesh_extraction._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._extension = Input(mesh_extraction._spec().input_pin(2), 2, op, -1) + self._extension: Input[int] = Input( + mesh_extraction._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._extension) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -200,7 +211,7 @@ def mesh(self) -> Input: return self._mesh @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -219,7 +230,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def extension(self) -> Input: + def extension(self) -> Input[int]: r"""Allows to connect extension input to the operator. 
Number of extension layer @@ -254,13 +265,13 @@ class OutputsMeshExtraction(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_extraction._spec().outputs, op) - self._abstract_meshed_region = Output( + self._abstract_meshed_region: Output[MeshedRegion] = Output( mesh_extraction._spec().output_pin(0), 0, op ) self._outputs.append(self._abstract_meshed_region) @property - def abstract_meshed_region(self) -> Output: + def abstract_meshed_region(self) -> Output[MeshedRegion]: r"""Allows to get abstract_meshed_region output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/mesh_get_attribute.py b/src/ansys/dpf/core/operators/mesh/mesh_get_attribute.py index b180855dfd0..e05fdc147db 100644 --- a/src/ansys/dpf/core/operators/mesh/mesh_get_attribute.py +++ b/src/ansys/dpf/core/operators/mesh/mesh_get_attribute.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + class mesh_get_attribute(Operator): r"""Uses the MeshedRegion APIs to return a given attribute of the mesh in @@ -187,19 +191,21 @@ class InputsMeshGetAttribute(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_get_attribute._spec().inputs, op) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( mesh_get_attribute._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._property_name = Input(mesh_get_attribute._spec().input_pin(1), 1, op, -1) + self._property_name: Input[str] = Input( + mesh_get_attribute._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._property_name) - self._property_identifier = Input( + self._property_identifier: Input[int | str] = Input( mesh_get_attribute._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._property_identifier) @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. Returns @@ -218,7 +224,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. Supported property names are: "connectivity", "reverse_connectivity", "mat", "faces_nodes_connectivity", "elements_faces_connectivity" (or any mesh's property field), "coordinates", "named_selection", "num_named_selections", "named_selection_names", "named_selection_locations", "node_scoping", "element_scoping", "face_scoping"... @@ -239,7 +245,7 @@ def property_name(self) -> Input: return self._property_name @property - def property_identifier(self) -> Input: + def property_identifier(self) -> Input[int | str]: r"""Allows to connect property_identifier input to the operator. Can be used to get a property at a given index, example: a named selection's number or by name, example: a named selection's name. 
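A minimal usage sketch of the typed pins introduced above (not part of the patch itself), assuming the bundled static example result file and a reachable DPF server; "mat" is one of the property names listed in the property_name docstring:

from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

# Build a mesh from the packaged example result file.
model = dpf.Model(examples.find_static_rst())
attr_op = ops.mesh.mesh_get_attribute()
# The pins are now annotated, so completion and type checking know what fits where.
attr_op.inputs.abstract_meshed_region.connect(model.metadata.meshed_region)  # Input[MeshedRegion]
attr_op.inputs.property_name.connect("mat")                                  # Input[str]
materials = attr_op.eval()  # evaluates output pin 0; its type depends on the requested property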
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_plan_clip.py b/src/ansys/dpf/core/operators/mesh/mesh_plan_clip.py index 69b32ec7016..362bb5b5c62 100644 --- a/src/ansys/dpf/core/operators/mesh/mesh_plan_clip.py +++ b/src/ansys/dpf/core/operators/mesh/mesh_plan_clip.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + class mesh_plan_clip(Operator): r"""Clips a volume mesh along a plane and keeps one side. @@ -180,15 +185,21 @@ class InputsMeshPlanClip(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_plan_clip._spec().inputs, op) - self._mesh_or_field = Input(mesh_plan_clip._spec().input_pin(0), 0, op, -1) + self._mesh_or_field: Input[MeshedRegion | Field] = Input( + mesh_plan_clip._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh_or_field) - self._normal = Input(mesh_plan_clip._spec().input_pin(1), 1, op, -1) + self._normal: Input[Field] = Input( + mesh_plan_clip._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._normal) - self._origin = Input(mesh_plan_clip._spec().input_pin(2), 2, op, -1) + self._origin: Input[Field] = Input( + mesh_plan_clip._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._origin) @property - def mesh_or_field(self) -> Input: + def mesh_or_field(self) -> Input[MeshedRegion | Field]: r"""Allows to connect mesh_or_field input to the operator. Returns @@ -207,7 +218,7 @@ def mesh_or_field(self) -> Input: return self._mesh_or_field @property - def normal(self) -> Input: + def normal(self) -> Input[Field]: r"""Allows to connect normal input to the operator. An overall 3D vector that gives the normal direction of the plane. @@ -228,7 +239,7 @@ def normal(self) -> Input: return self._normal @property - def origin(self) -> Input: + def origin(self) -> Input[Field]: r"""Allows to connect origin input to the operator. An overall 3D vector that gives a point of the plane. 
@@ -264,13 +275,15 @@ class OutputsMeshPlanClip(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_plan_clip._spec().outputs, op) - self._field = Output(mesh_plan_clip._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(mesh_plan_clip._spec().output_pin(0), 0, op) self._outputs.append(self._field) - self._mesh = Output(mesh_plan_clip._spec().output_pin(2), 2, op) + self._mesh: Output[MeshedRegion] = Output( + mesh_plan_clip._spec().output_pin(2), 2, op + ) self._outputs.append(self._mesh) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns @@ -288,7 +301,7 @@ def field(self) -> Output: return self._field @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/mesh_provider.py b/src/ansys/dpf/core/operators/mesh/mesh_provider.py index ff112cfa7f3..0e4d8ff2915 100644 --- a/src/ansys/dpf/core/operators/mesh/mesh_provider.py +++ b/src/ansys/dpf/core/operators/mesh/mesh_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.data_tree import DataTree + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class mesh_provider(Operator): r"""Reads a mesh from result files. @@ -232,21 +240,33 @@ class InputsMeshProvider(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_provider._spec().inputs, op) - self._time_scoping = Input(mesh_provider._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[int] = Input( + mesh_provider._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._streams_container = Input(mesh_provider._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + mesh_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(mesh_provider._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + mesh_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._read_cyclic = Input(mesh_provider._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + mesh_provider._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._region_scoping = Input(mesh_provider._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + mesh_provider._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._laziness = Input(mesh_provider._spec().input_pin(200), 200, op, -1) + self._laziness: Input[DataTree] = Input( + mesh_provider._spec().input_pin(200), 200, op, -1 + ) self._inputs.append(self._laziness) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[int]: r"""Allows to connect time_scoping input to the operator. Optional time/frequency set ID of the mesh, supported for adaptative meshes. 
@@ -267,7 +287,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -288,7 +308,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -309,7 +329,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. If 1, cyclic symmetry is ignored. If 2, cyclic expansion is done (default is 1). @@ -330,7 +350,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids with one entity (vector) or region scoping with one id (scoping) (region corresponds to zone for Fluid results or part for LSDyna results). @@ -351,7 +371,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def laziness(self) -> Input: + def laziness(self) -> Input[DataTree]: r"""Allows to connect laziness input to the operator. configurate whether lazy evaluation can be performed and to what extent. Supported attributes are: @@ -389,11 +409,13 @@ class OutputsMeshProvider(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_provider._spec().outputs, op) - self._mesh = Output(mesh_provider._spec().output_pin(0), 0, op) + self._mesh: Output[MeshedRegion] = Output( + mesh_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh) @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/mesh_to_graphics.py b/src/ansys/dpf/core/operators/mesh/mesh_to_graphics.py index 2b9a7ab5aa6..d8cf1bc13ea 100644 --- a/src/ansys/dpf/core/operators/mesh/mesh_to_graphics.py +++ b/src/ansys/dpf/core/operators/mesh/mesh_to_graphics.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.property_field import PropertyField + from ansys.dpf.core.scoping import Scoping + class mesh_to_graphics(Operator): r"""Generate tessellation for input mesh @@ -189,15 +196,21 @@ class InputsMeshToGraphics(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_to_graphics._spec().inputs, op) - self._mesh_scoping = Input(mesh_to_graphics._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + mesh_to_graphics._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._node_normals = Input(mesh_to_graphics._spec().input_pin(2), 2, op, -1) + self._node_normals: Input[bool] = Input( + mesh_to_graphics._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._node_normals) 
- self._mesh = Input(mesh_to_graphics._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + mesh_to_graphics._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -216,7 +229,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def node_normals(self) -> Input: + def node_normals(self) -> Input[bool]: r"""Allows to connect node_normals input to the operator. average element normals for node normals (default no, use element normals for node normals) @@ -237,7 +250,7 @@ def node_normals(self) -> Input: return self._node_normals @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -272,15 +285,21 @@ class OutputsMeshToGraphics(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_to_graphics._spec().outputs, op) - self._nodes = Output(mesh_to_graphics._spec().output_pin(0), 0, op) + self._nodes: Output[Field] = Output( + mesh_to_graphics._spec().output_pin(0), 0, op + ) self._outputs.append(self._nodes) - self._normals = Output(mesh_to_graphics._spec().output_pin(1), 1, op) + self._normals: Output[Field] = Output( + mesh_to_graphics._spec().output_pin(1), 1, op + ) self._outputs.append(self._normals) - self._connectivity = Output(mesh_to_graphics._spec().output_pin(2), 2, op) + self._connectivity: Output[PropertyField] = Output( + mesh_to_graphics._spec().output_pin(2), 2, op + ) self._outputs.append(self._connectivity) @property - def nodes(self) -> Output: + def nodes(self) -> Output[Field]: r"""Allows to get nodes output of the operator node coordinates @@ -300,7 +319,7 @@ def nodes(self) -> Output: return self._nodes @property - def normals(self) -> Output: + def normals(self) -> Output[Field]: r"""Allows to get normals output of the operator node normals @@ -320,7 +339,7 @@ def normals(self) -> Output: return self._normals @property - def connectivity(self) -> Output: + def connectivity(self) -> Output[PropertyField]: r"""Allows to get connectivity output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/mesh_to_graphics_edges.py b/src/ansys/dpf/core/operators/mesh/mesh_to_graphics_edges.py index 6fbb95f4389..c1638f077c8 100644 --- a/src/ansys/dpf/core/operators/mesh/mesh_to_graphics_edges.py +++ b/src/ansys/dpf/core/operators/mesh/mesh_to_graphics_edges.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.property_field import PropertyField + from ansys.dpf.core.scoping import Scoping + class mesh_to_graphics_edges(Operator): r"""Generate edges of surface elements for input mesh @@ -184,19 +191,21 @@ class InputsMeshToGraphicsEdges(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_to_graphics_edges._spec().inputs, op) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( mesh_to_graphics_edges._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._include_mid_nodes = Input( + self._include_mid_nodes: Input[bool] = Input( 
mesh_to_graphics_edges._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._include_mid_nodes) - self._mesh = Input(mesh_to_graphics_edges._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + mesh_to_graphics_edges._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -215,7 +224,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def include_mid_nodes(self) -> Input: + def include_mid_nodes(self) -> Input[bool]: r"""Allows to connect include_mid_nodes input to the operator. Returns @@ -234,7 +243,7 @@ def include_mid_nodes(self) -> Input: return self._include_mid_nodes @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -268,13 +277,17 @@ class OutputsMeshToGraphicsEdges(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_to_graphics_edges._spec().outputs, op) - self._nodes = Output(mesh_to_graphics_edges._spec().output_pin(0), 0, op) + self._nodes: Output[Field] = Output( + mesh_to_graphics_edges._spec().output_pin(0), 0, op + ) self._outputs.append(self._nodes) - self._connectivity = Output(mesh_to_graphics_edges._spec().output_pin(2), 2, op) + self._connectivity: Output[PropertyField] = Output( + mesh_to_graphics_edges._spec().output_pin(2), 2, op + ) self._outputs.append(self._connectivity) @property - def nodes(self) -> Output: + def nodes(self) -> Output[Field]: r"""Allows to get nodes output of the operator node coordinates @@ -294,7 +307,7 @@ def nodes(self) -> Output: return self._nodes @property - def connectivity(self) -> Output: + def connectivity(self) -> Output[PropertyField]: r"""Allows to get connectivity output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/mesh_to_pyvista.py b/src/ansys/dpf/core/operators/mesh/mesh_to_pyvista.py index ede1c3452ad..1782c2bc758 100644 --- a/src/ansys/dpf/core/operators/mesh/mesh_to_pyvista.py +++ b/src/ansys/dpf/core/operators/mesh/mesh_to_pyvista.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + class mesh_to_pyvista(Operator): r"""Export a MeshedRegion in the pyVista format. 
@@ -238,19 +243,29 @@ class InputsMeshToPyvista(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_to_pyvista._spec().inputs, op) - self._coordinates = Input(mesh_to_pyvista._spec().input_pin(2), 2, op, -1) + self._coordinates: Input[Field] = Input( + mesh_to_pyvista._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._coordinates) - self._as_linear = Input(mesh_to_pyvista._spec().input_pin(6), 6, op, -1) + self._as_linear: Input[bool] = Input( + mesh_to_pyvista._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._as_linear) - self._mesh = Input(mesh_to_pyvista._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + mesh_to_pyvista._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._vtk_updated = Input(mesh_to_pyvista._spec().input_pin(60), 60, op, -1) + self._vtk_updated: Input[bool] = Input( + mesh_to_pyvista._spec().input_pin(60), 60, op, -1 + ) self._inputs.append(self._vtk_updated) - self._as_poly = Input(mesh_to_pyvista._spec().input_pin(200), 200, op, -1) + self._as_poly: Input[bool] = Input( + mesh_to_pyvista._spec().input_pin(200), 200, op, -1 + ) self._inputs.append(self._as_poly) @property - def coordinates(self) -> Input: + def coordinates(self) -> Input[Field]: r"""Allows to connect coordinates input to the operator. Node coordinates. If not set, the node coordinates of the mesh are employed. @@ -271,7 +286,7 @@ def coordinates(self) -> Input: return self._coordinates @property - def as_linear(self) -> Input: + def as_linear(self) -> Input[bool]: r"""Allows to connect as_linear input to the operator. Export a linear version of the mesh (quadratic surface elements do no include midside nodes). If not set, defaults to true. @@ -292,7 +307,7 @@ def as_linear(self) -> Input: return self._as_linear @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. mesh to export in pyVista format @@ -313,7 +328,7 @@ def mesh(self) -> Input: return self._mesh @property - def vtk_updated(self) -> Input: + def vtk_updated(self) -> Input[bool]: r"""Allows to connect vtk_updated input to the operator. True if the VTK version employed by pyVista is > VTK 9. Default true. @@ -334,7 +349,7 @@ def vtk_updated(self) -> Input: return self._vtk_updated @property - def as_poly(self) -> Input: + def as_poly(self) -> Input[bool]: r"""Allows to connect as_poly input to the operator. Export elements as polyhedrons (cell-face-node representation). Default false. 
@@ -372,17 +387,19 @@ class OutputsMeshToPyvista(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_to_pyvista._spec().outputs, op) - self._nodes = Output(mesh_to_pyvista._spec().output_pin(0), 0, op) + self._nodes: Output[Field] = Output( + mesh_to_pyvista._spec().output_pin(0), 0, op + ) self._outputs.append(self._nodes) - self._cells = Output(mesh_to_pyvista._spec().output_pin(1), 1, op) + self._cells: Output = Output(mesh_to_pyvista._spec().output_pin(1), 1, op) self._outputs.append(self._cells) - self._cell_types = Output(mesh_to_pyvista._spec().output_pin(2), 2, op) + self._cell_types: Output = Output(mesh_to_pyvista._spec().output_pin(2), 2, op) self._outputs.append(self._cell_types) - self._offsets = Output(mesh_to_pyvista._spec().output_pin(3), 3, op) + self._offsets: Output = Output(mesh_to_pyvista._spec().output_pin(3), 3, op) self._outputs.append(self._offsets) @property - def nodes(self) -> Output: + def nodes(self) -> Output[Field]: r"""Allows to get nodes output of the operator Node coordinates double vector diff --git a/src/ansys/dpf/core/operators/mesh/mesh_to_tetra.py b/src/ansys/dpf/core/operators/mesh/mesh_to_tetra.py index c1f33b63214..c051ff9f2c1 100644 --- a/src/ansys/dpf/core/operators/mesh/mesh_to_tetra.py +++ b/src/ansys/dpf/core/operators/mesh/mesh_to_tetra.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class mesh_to_tetra(Operator): r"""Converts 3D meshes of arbitrary 3D element types into a tetrahedral @@ -166,11 +171,13 @@ class InputsMeshToTetra(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_to_tetra._spec().inputs, op) - self._mesh = Input(mesh_to_tetra._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + mesh_to_tetra._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh with arbitrary element types. @@ -207,15 +214,21 @@ class OutputsMeshToTetra(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_to_tetra._spec().outputs, op) - self._mesh = Output(mesh_to_tetra._spec().output_pin(0), 0, op) + self._mesh: Output[MeshedRegion] = Output( + mesh_to_tetra._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh) - self._node_mapping = Output(mesh_to_tetra._spec().output_pin(1), 1, op) + self._node_mapping: Output[Scoping] = Output( + mesh_to_tetra._spec().output_pin(1), 1, op + ) self._outputs.append(self._node_mapping) - self._element_mapping = Output(mesh_to_tetra._spec().output_pin(2), 2, op) + self._element_mapping: Output[Scoping] = Output( + mesh_to_tetra._spec().output_pin(2), 2, op + ) self._outputs.append(self._element_mapping) @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Tetrahedralized mesh. @@ -235,7 +248,7 @@ def mesh(self) -> Output: return self._mesh @property - def node_mapping(self) -> Output: + def node_mapping(self) -> Output[Scoping]: r"""Allows to get node_mapping output of the operator Node mapping. 
@@ -255,7 +268,7 @@ def node_mapping(self) -> Output: return self._node_mapping @property - def element_mapping(self) -> Output: + def element_mapping(self) -> Output[Scoping]: r"""Allows to get element_mapping output of the operator Element mapping. diff --git a/src/ansys/dpf/core/operators/mesh/meshes_provider.py b/src/ansys/dpf/core/operators/mesh/meshes_provider.py index 1cb87835832..49629aa5664 100644 --- a/src/ansys/dpf/core/operators/mesh/meshes_provider.py +++ b/src/ansys/dpf/core/operators/mesh/meshes_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class meshes_provider(Operator): r"""Reads meshes from result files. Meshes can be spatially or temporally @@ -212,19 +219,29 @@ class InputsMeshesProvider(_Inputs): def __init__(self, op: Operator): super().__init__(meshes_provider._spec().inputs, op) - self._time_scoping = Input(meshes_provider._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int] = Input( + meshes_provider._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._streams_container = Input(meshes_provider._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + meshes_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(meshes_provider._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + meshes_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._read_cyclic = Input(meshes_provider._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + meshes_provider._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._region_scoping = Input(meshes_provider._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + meshes_provider._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int]: r"""Allows to connect time_scoping input to the operator. Time/frequency set IDs required in output. @@ -245,7 +262,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -266,7 +283,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -287,7 +304,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 1, cyclic symmetry is ignored. If 2, cyclic expansion is done (default is 1). 
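With the annotations above, a static checker can also flag pin misuse that previously went unnoticed. A small sketch (not part of the patch), again assuming the bundled example file:

from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

provider = ops.mesh.meshes_provider()
# Accepted: the pin is annotated Input[DataSources].
provider.inputs.data_sources.connect(dpf.DataSources(examples.find_static_rst()))
# provider.inputs.data_sources.connect("file.rst")  # mypy/pyright would report an argument type error here
meshes = provider.outputs.meshes()  # evaluates the meshes output (a MeshesContainer)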
@@ -308,7 +325,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -343,11 +360,13 @@ class OutputsMeshesProvider(_Outputs): def __init__(self, op: Operator): super().__init__(meshes_provider._spec().outputs, op) - self._meshes = Output(meshes_provider._spec().output_pin(0), 0, op) + self._meshes: Output[MeshesContainer] = Output( + meshes_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._meshes) @property - def meshes(self) -> Output: + def meshes(self) -> Output[MeshesContainer]: r"""Allows to get meshes output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/node_coordinates.py b/src/ansys/dpf/core/operators/mesh/node_coordinates.py index 4176c995662..195c6818c00 100644 --- a/src/ansys/dpf/core/operators/mesh/node_coordinates.py +++ b/src/ansys/dpf/core/operators/mesh/node_coordinates.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + class node_coordinates(Operator): r"""Returns the node coordinates of the mesh(es) in input. @@ -142,11 +147,13 @@ class InputsNodeCoordinates(_Inputs): def __init__(self, op: Operator): super().__init__(node_coordinates._spec().inputs, op) - self._mesh = Input(node_coordinates._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + node_coordinates._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. Returns diff --git a/src/ansys/dpf/core/operators/mesh/points_from_coordinates.py b/src/ansys/dpf/core/operators/mesh/points_from_coordinates.py index a59550934af..1e6ffa7134d 100644 --- a/src/ansys/dpf/core/operators/mesh/points_from_coordinates.py +++ b/src/ansys/dpf/core/operators/mesh/points_from_coordinates.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class points_from_coordinates(Operator): r"""Extract a mesh made of points elements. 
This mesh is made from input @@ -160,15 +167,17 @@ class InputsPointsFromCoordinates(_Inputs): def __init__(self, op: Operator): super().__init__(points_from_coordinates._spec().inputs, op) - self._nodes_to_keep = Input( + self._nodes_to_keep: Input[Scoping | ScopingsContainer] = Input( points_from_coordinates._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._nodes_to_keep) - self._mesh = Input(points_from_coordinates._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + points_from_coordinates._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) @property - def nodes_to_keep(self) -> Input: + def nodes_to_keep(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect nodes_to_keep input to the operator. Returns @@ -187,7 +196,7 @@ def nodes_to_keep(self) -> Input: return self._nodes_to_keep @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. Returns @@ -220,13 +229,13 @@ class OutputsPointsFromCoordinates(_Outputs): def __init__(self, op: Operator): super().__init__(points_from_coordinates._spec().outputs, op) - self._abstract_meshed_region = Output( + self._abstract_meshed_region: Output[MeshedRegion] = Output( points_from_coordinates._spec().output_pin(0), 0, op ) self._outputs.append(self._abstract_meshed_region) @property - def abstract_meshed_region(self) -> Output: + def abstract_meshed_region(self) -> Output[MeshedRegion]: r"""Allows to get abstract_meshed_region output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/skin.py b/src/ansys/dpf/core/operators/mesh/skin.py index dd93af55952..a318ab728ac 100644 --- a/src/ansys/dpf/core/operators/mesh/skin.py +++ b/src/ansys/dpf/core/operators/mesh/skin.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.property_field import PropertyField + from ansys.dpf.core.scoping import Scoping + class skin(Operator): r"""Extracts a skin of the mesh in a new meshed region. The material ID of @@ -239,17 +245,17 @@ class InputsSkin(_Inputs): def __init__(self, op: Operator): super().__init__(skin._spec().inputs, op) - self._mesh = Input(skin._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input(skin._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) - self._mesh_scoping = Input(skin._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input(skin._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._duplicate_shell = Input(skin._spec().input_pin(2), 2, op, -1) + self._duplicate_shell: Input[bool] = Input(skin._spec().input_pin(2), 2, op, -1) self._inputs.append(self._duplicate_shell) - self._add_beam_point = Input(skin._spec().input_pin(3), 3, op, -1) + self._add_beam_point: Input[bool] = Input(skin._spec().input_pin(3), 3, op, -1) self._inputs.append(self._add_beam_point) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. 
Returns @@ -268,7 +274,7 @@ def mesh(self) -> Input: return self._mesh @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Nodal scoping to restrict the skin extraction to a set of nodes. If provided, a skin element is added to the skin mesh if all its nodes are in the scoping. @@ -289,7 +295,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def duplicate_shell(self) -> Input: + def duplicate_shell(self) -> Input[bool]: r"""Allows to connect duplicate_shell input to the operator. If input mesh contains shell elements, output mesh shell elements (boolean = 1) are duplicated, one per each orientation, or (boolean = 0) remain unchanged. @@ -310,7 +316,7 @@ def duplicate_shell(self) -> Input: return self._duplicate_shell @property - def add_beam_point(self) -> Input: + def add_beam_point(self) -> Input[bool]: r"""Allows to connect add_beam_point input to the operator. If input mesh contains beam or point elements, output mesh beam point elements (boolean = 1) are added or (boolean = 0) are ignored. Default: False @@ -361,21 +367,27 @@ class OutputsSkin(_Outputs): def __init__(self, op: Operator): super().__init__(skin._spec().outputs, op) - self._mesh = Output(skin._spec().output_pin(0), 0, op) + self._mesh: Output[MeshedRegion] = Output(skin._spec().output_pin(0), 0, op) self._outputs.append(self._mesh) - self._nodes_mesh_scoping = Output(skin._spec().output_pin(1), 1, op) + self._nodes_mesh_scoping: Output[Scoping] = Output( + skin._spec().output_pin(1), 1, op + ) self._outputs.append(self._nodes_mesh_scoping) - self._map_new_elements_to_old = Output(skin._spec().output_pin(2), 2, op) + self._map_new_elements_to_old: Output = Output( + skin._spec().output_pin(2), 2, op + ) self._outputs.append(self._map_new_elements_to_old) - self._property_field_new_elements_to_old = Output( + self._property_field_new_elements_to_old: Output[PropertyField] = Output( skin._spec().output_pin(3), 3, op ) self._outputs.append(self._property_field_new_elements_to_old) - self._facet_indices = Output(skin._spec().output_pin(4), 4, op) + self._facet_indices: Output[PropertyField] = Output( + skin._spec().output_pin(4), 4, op + ) self._outputs.append(self._facet_indices) @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Skin meshed region with facets and facets_to_ele property fields. @@ -395,7 +407,7 @@ def mesh(self) -> Output: return self._mesh @property - def nodes_mesh_scoping(self) -> Output: + def nodes_mesh_scoping(self) -> Output[Scoping]: r"""Allows to get nodes_mesh_scoping output of the operator Returns @@ -431,7 +443,7 @@ def map_new_elements_to_old(self) -> Output: return self._map_new_elements_to_old @property - def property_field_new_elements_to_old(self) -> Output: + def property_field_new_elements_to_old(self) -> Output[PropertyField]: r"""Allows to get property_field_new_elements_to_old output of the operator This property field provides, for each new face element ID (in the scoping), the corresponding 3D volume element index (in the data) it has been extracted from. The 3D volume element ID can be found with the element scoping of the input mesh. 
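The typed outputs read the same way at the call site; a short sketch (not part of the patch), assuming the bundled example file:

from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

mesh = dpf.Model(examples.find_static_rst()).metadata.meshed_region
skin_op = ops.mesh.skin()
skin_op.inputs.mesh.connect(mesh)                    # Input[MeshedRegion]
skin_mesh = skin_op.outputs.mesh()                   # pin annotated Output[MeshedRegion]
skin_scoping = skin_op.outputs.nodes_mesh_scoping()  # pin annotated Output[Scoping]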
@@ -451,7 +463,7 @@ def property_field_new_elements_to_old(self) -> Output: return self._property_field_new_elements_to_old @property - def facet_indices(self) -> Output: + def facet_indices(self) -> Output[PropertyField]: r"""Allows to get facet_indices output of the operator This property field gives, for each new face element ID (in the scoping), the corresponding face index on the source 3D volume element. The 3D volume element can be extracted from the previous output. diff --git a/src/ansys/dpf/core/operators/mesh/split_fields.py b/src/ansys/dpf/core/operators/mesh/split_fields.py index 3735a152d4b..8ff3a14a8b3 100644 --- a/src/ansys/dpf/core/operators/mesh/split_fields.py +++ b/src/ansys/dpf/core/operators/mesh/split_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshes_container import MeshesContainer + class split_fields(Operator): r"""Split the input field or fields container based on the input mesh @@ -159,15 +165,17 @@ class InputsSplitFields(_Inputs): def __init__(self, op: Operator): super().__init__(split_fields._spec().inputs, op) - self._field_or_fields_container = Input( + self._field_or_fields_container: Input[Field | FieldsContainer] = Input( split_fields._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._field_or_fields_container) - self._meshes = Input(split_fields._spec().input_pin(1), 1, op, -1) + self._meshes: Input[MeshesContainer] = Input( + split_fields._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._meshes) @property - def field_or_fields_container(self) -> Input: + def field_or_fields_container(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field_or_fields_container input to the operator. Returns @@ -186,7 +194,7 @@ def field_or_fields_container(self) -> Input: return self._field_or_fields_container @property - def meshes(self) -> Input: + def meshes(self) -> Input[MeshesContainer]: r"""Allows to connect meshes input to the operator. 
body meshes in the mesh controller cannot be mixed shell/solid @@ -221,11 +229,13 @@ class OutputsSplitFields(_Outputs): def __init__(self, op: Operator): super().__init__(split_fields._spec().outputs, op) - self._fields_container = Output(split_fields._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + split_fields._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/split_mesh.py b/src/ansys/dpf/core/operators/mesh/split_mesh.py index 7221e0bfc52..1d0b1812bc5 100644 --- a/src/ansys/dpf/core/operators/mesh/split_mesh.py +++ b/src/ansys/dpf/core/operators/mesh/split_mesh.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + class split_mesh(Operator): r"""Split the input mesh into several meshes based on a given property @@ -173,15 +179,19 @@ class InputsSplitMesh(_Inputs): def __init__(self, op: Operator): super().__init__(split_mesh._spec().inputs, op) - self._mesh_scoping = Input(split_mesh._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + split_mesh._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._mesh = Input(split_mesh._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + split_mesh._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._property = Input(split_mesh._spec().input_pin(13), 13, op, -1) + self._property: Input[str] = Input(split_mesh._spec().input_pin(13), 13, op, -1) self._inputs.append(self._property) @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Scoping @@ -202,7 +212,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -221,7 +231,7 @@ def mesh(self) -> Input: return self._mesh @property - def property(self) -> Input: + def property(self) -> Input[str]: r"""Allows to connect property input to the operator. 
Returns @@ -254,11 +264,13 @@ class OutputsSplitMesh(_Outputs): def __init__(self, op: Operator): super().__init__(split_mesh._spec().outputs, op) - self._meshes = Output(split_mesh._spec().output_pin(0), 0, op) + self._meshes: Output[MeshesContainer] = Output( + split_mesh._spec().output_pin(0), 0, op + ) self._outputs.append(self._meshes) @property - def meshes(self) -> Output: + def meshes(self) -> Output[MeshesContainer]: r"""Allows to get meshes output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/stl_export.py b/src/ansys/dpf/core/operators/mesh/stl_export.py index a0e91b691b2..f9b7c912b8c 100644 --- a/src/ansys/dpf/core/operators/mesh/stl_export.py +++ b/src/ansys/dpf/core/operators/mesh/stl_export.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.meshed_region import MeshedRegion + class stl_export(Operator): r"""export a mesh into a stl file. @@ -154,13 +159,15 @@ class InputsStlExport(_Inputs): def __init__(self, op: Operator): super().__init__(stl_export._spec().inputs, op) - self._mesh = Input(stl_export._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + stl_export._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) - self._file_path = Input(stl_export._spec().input_pin(1), 1, op, -1) + self._file_path: Input[str] = Input(stl_export._spec().input_pin(1), 1, op, -1) self._inputs.append(self._file_path) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -179,7 +186,7 @@ def mesh(self) -> Input: return self._mesh @property - def file_path(self) -> Input: + def file_path(self) -> Input[str]: r"""Allows to connect file_path input to the operator. Returns @@ -212,11 +219,13 @@ class OutputsStlExport(_Outputs): def __init__(self, op: Operator): super().__init__(stl_export._spec().outputs, op) - self._data_sources = Output(stl_export._spec().output_pin(0), 0, op) + self._data_sources: Output[DataSources] = Output( + stl_export._spec().output_pin(0), 0, op + ) self._outputs.append(self._data_sources) @property - def data_sources(self) -> Output: + def data_sources(self) -> Output[DataSources]: r"""Allows to get data_sources output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/tri_mesh_skin.py b/src/ansys/dpf/core/operators/mesh/tri_mesh_skin.py index 59c437b6320..5cc56ef8f6b 100644 --- a/src/ansys/dpf/core/operators/mesh/tri_mesh_skin.py +++ b/src/ansys/dpf/core/operators/mesh/tri_mesh_skin.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class tri_mesh_skin(Operator): r"""Extracts a skin of the mesh in triangles in a new meshed region. 
@@ -189,15 +194,21 @@ class InputsTriMeshSkin(_Inputs): def __init__(self, op: Operator): super().__init__(tri_mesh_skin._spec().inputs, op) - self._mesh = Input(tri_mesh_skin._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + tri_mesh_skin._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) - self._include_surfaces = Input(tri_mesh_skin._spec().input_pin(1), 1, op, -1) + self._include_surfaces: Input[bool] = Input( + tri_mesh_skin._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._include_surfaces) - self._mesh_scoping = Input(tri_mesh_skin._spec().input_pin(2), 2, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + tri_mesh_skin._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._mesh_scoping) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -216,7 +227,7 @@ def mesh(self) -> Input: return self._mesh @property - def include_surfaces(self) -> Input: + def include_surfaces(self) -> Input[bool]: r"""Allows to connect include_surfaces input to the operator. True: meshing will also take into account shell and skin elements. False: meshing will ignore shell and skin elements. The default is false. @@ -237,7 +248,7 @@ def include_surfaces(self) -> Input: return self._include_surfaces @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Nodal scoping to restrict the skin extraction to a set of nodes. If provided, a skin element is added to the skin mesh if all its nodes are in the scoping. @@ -273,13 +284,17 @@ class OutputsTriMeshSkin(_Outputs): def __init__(self, op: Operator): super().__init__(tri_mesh_skin._spec().outputs, op) - self._mesh = Output(tri_mesh_skin._spec().output_pin(0), 0, op) + self._mesh: Output[MeshedRegion] = Output( + tri_mesh_skin._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh) - self._nodes_mesh_scoping = Output(tri_mesh_skin._spec().output_pin(1), 1, op) + self._nodes_mesh_scoping: Output[Scoping] = Output( + tri_mesh_skin._spec().output_pin(1), 1, op + ) self._outputs.append(self._nodes_mesh_scoping) @property - def mesh(self) -> Output: + def mesh(self) -> Output[MeshedRegion]: r"""Allows to get mesh output of the operator Returns @@ -297,7 +312,7 @@ def mesh(self) -> Output: return self._mesh @property - def nodes_mesh_scoping(self) -> Output: + def nodes_mesh_scoping(self) -> Output[Scoping]: r"""Allows to get nodes_mesh_scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/mesh/wireframe.py b/src/ansys/dpf/core/operators/mesh/wireframe.py index 87e32ef40b9..86c5f7209aa 100644 --- a/src/ansys/dpf/core/operators/mesh/wireframe.py +++ b/src/ansys/dpf/core/operators/mesh/wireframe.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + class wireframe(Operator): r"""Take a mesh and extracts its sharp edges, using pin 1 value as a @@ -157,13 +161,15 @@ class InputsWireframe(_Inputs): def __init__(self, op: Operator): super().__init__(wireframe._spec().inputs, op) - self._mesh = Input(wireframe._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + 
wireframe._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) - self._threshold = Input(wireframe._spec().input_pin(1), 1, op, -1) + self._threshold: Input[float] = Input(wireframe._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -182,7 +188,7 @@ def mesh(self) -> Input: return self._mesh @property - def threshold(self) -> Input: + def threshold(self) -> Input[float]: r"""Allows to connect threshold input to the operator. angle threshold in radian that will trigger an edge detection. @@ -217,11 +223,13 @@ class OutputsWireframe(_Outputs): def __init__(self, op: Operator): super().__init__(wireframe._spec().outputs, op) - self._wireframe = Output(wireframe._spec().output_pin(0), 0, op) + self._wireframe: Output[MeshedRegion] = Output( + wireframe._spec().output_pin(0), 0, op + ) self._outputs.append(self._wireframe) @property - def wireframe(self) -> Output: + def wireframe(self) -> Output[MeshedRegion]: r"""Allows to get wireframe output of the operator Returns diff --git a/src/ansys/dpf/core/operators/metadata/boundary_condition_provider.py b/src/ansys/dpf/core/operators/metadata/boundary_condition_provider.py index 956e60215b9..87486a29bc4 100644 --- a/src/ansys/dpf/core/operators/metadata/boundary_condition_provider.py +++ b/src/ansys/dpf/core/operators/metadata/boundary_condition_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + class boundary_condition_provider(Operator): r"""Reads boundary conditions from the results files contained in the @@ -160,17 +165,17 @@ class InputsBoundaryConditionProvider(_Inputs): def __init__(self, op: Operator): super().__init__(boundary_condition_provider._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( boundary_condition_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( boundary_condition_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -189,7 +194,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
Returns diff --git a/src/ansys/dpf/core/operators/metadata/coordinate_system_data_provider.py b/src/ansys/dpf/core/operators/metadata/coordinate_system_data_provider.py index 84f08b51986..020ae8693d8 100644 --- a/src/ansys/dpf/core/operators/metadata/coordinate_system_data_provider.py +++ b/src/ansys/dpf/core/operators/metadata/coordinate_system_data_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.generic_data_container import GenericDataContainer + from ansys.dpf.core.streams_container import StreamsContainer + class coordinate_system_data_provider(Operator): r"""Reads coordinate systems data from the result files contained in the @@ -194,21 +200,21 @@ class InputsCoordinateSystemDataProvider(_Inputs): def __init__(self, op: Operator): super().__init__(coordinate_system_data_provider._spec().inputs, op) - self._solver_coordinate_system_ids = Input( + self._solver_coordinate_system_ids: Input[int] = Input( coordinate_system_data_provider._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._solver_coordinate_system_ids) - self._streams = Input( + self._streams: Input[StreamsContainer] = Input( coordinate_system_data_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( coordinate_system_data_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def solver_coordinate_system_ids(self) -> Input: + def solver_coordinate_system_ids(self) -> Input[int]: r"""Allows to connect solver_coordinate_system_ids input to the operator. Coorfinate System ids to recover used by the solver. If not set, all available materials to be recovered. @@ -229,7 +235,7 @@ def solver_coordinate_system_ids(self) -> Input: return self._solver_coordinate_system_ids @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. Result file container allowed to be kept open to cache data. @@ -250,7 +256,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Result file path container, used if no streams are set. 
@@ -286,17 +292,17 @@ class OutputsCoordinateSystemDataProvider(_Outputs): def __init__(self, op: Operator): super().__init__(coordinate_system_data_provider._spec().outputs, op) - self._coordinate_system_data1 = Output( + self._coordinate_system_data1: Output[GenericDataContainer] = Output( coordinate_system_data_provider._spec().output_pin(0), 0, op ) self._outputs.append(self._coordinate_system_data1) - self._coordinate_system_data2 = Output( + self._coordinate_system_data2: Output[GenericDataContainer] = Output( coordinate_system_data_provider._spec().output_pin(1), 1, op ) self._outputs.append(self._coordinate_system_data2) @property - def coordinate_system_data1(self) -> Output: + def coordinate_system_data1(self) -> Output[GenericDataContainer]: r"""Allows to get coordinate_system_data1 output of the operator Returns @@ -314,7 +320,7 @@ def coordinate_system_data1(self) -> Output: return self._coordinate_system_data1 @property - def coordinate_system_data2(self) -> Output: + def coordinate_system_data2(self) -> Output[GenericDataContainer]: r"""Allows to get coordinate_system_data2 output of the operator Returns diff --git a/src/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py b/src/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py index e93b064ecaf..88791c0cecf 100644 --- a/src/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py +++ b/src/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.cyclic_support import CyclicSupport + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class cyclic_mesh_expansion(Operator): r"""Expand the mesh. @@ -186,21 +194,21 @@ class InputsCyclicMeshExpansion(_Inputs): def __init__(self, op: Operator): super().__init__(cyclic_mesh_expansion._spec().inputs, op) - self._sector_meshed_region = Input( + self._sector_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( cyclic_mesh_expansion._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._sector_meshed_region) - self._cyclic_support = Input( + self._cyclic_support: Input[CyclicSupport] = Input( cyclic_mesh_expansion._spec().input_pin(16), 16, op, -1 ) self._inputs.append(self._cyclic_support) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( cyclic_mesh_expansion._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) @property - def sector_meshed_region(self) -> Input: + def sector_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect sector_meshed_region input to the operator. Returns @@ -219,7 +227,7 @@ def sector_meshed_region(self) -> Input: return self._sector_meshed_region @property - def cyclic_support(self) -> Input: + def cyclic_support(self) -> Input[CyclicSupport]: r"""Allows to connect cyclic_support input to the operator. 
Returns @@ -238,7 +246,7 @@ def cyclic_support(self) -> Input: return self._cyclic_support @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label. @@ -274,15 +282,17 @@ class OutputsCyclicMeshExpansion(_Outputs): def __init__(self, op: Operator): super().__init__(cyclic_mesh_expansion._spec().outputs, op) - self._meshed_region = Output(cyclic_mesh_expansion._spec().output_pin(0), 0, op) + self._meshed_region: Output[MeshedRegion] = Output( + cyclic_mesh_expansion._spec().output_pin(0), 0, op + ) self._outputs.append(self._meshed_region) - self._cyclic_support = Output( + self._cyclic_support: Output[CyclicSupport] = Output( cyclic_mesh_expansion._spec().output_pin(1), 1, op ) self._outputs.append(self._cyclic_support) @property - def meshed_region(self) -> Output: + def meshed_region(self) -> Output[MeshedRegion]: r"""Allows to get meshed_region output of the operator expanded meshed region. @@ -302,7 +312,7 @@ def meshed_region(self) -> Output: return self._meshed_region @property - def cyclic_support(self) -> Output: + def cyclic_support(self) -> Output[CyclicSupport]: r"""Allows to get cyclic_support output of the operator input cyclic support modified in place containing the new expanded meshed regions. diff --git a/src/ansys/dpf/core/operators/metadata/cyclic_support_provider.py b/src/ansys/dpf/core/operators/metadata/cyclic_support_provider.py index 0f7ee96b3bf..14df1359d1d 100644 --- a/src/ansys/dpf/core/operators/metadata/cyclic_support_provider.py +++ b/src/ansys/dpf/core/operators/metadata/cyclic_support_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.core import errors @@ -15,6 +16,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.cyclic_support import CyclicSupport + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class cyclic_support_provider(Operator): r"""Read the cyclic support (DPF entity containing necessary information for @@ -233,29 +243,29 @@ class InputsCyclicSupportProvider(_Inputs): def __init__(self, op: Operator): super().__init__(cyclic_support_provider._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( cyclic_support_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( cyclic_support_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._sector_meshed_region = Input( + self._sector_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( cyclic_support_provider._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._sector_meshed_region) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( cyclic_support_provider._spec().input_pin(15), 15, op, -1 ) 
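A minimal sketch of the cyclic_mesh_expansion pins typed above, assuming sector_mesh (a MeshedRegion) and support (a CyclicSupport) were obtained elsewhere, for example from cyclic_support_provider:

from ansys.dpf import core as dpf

op = dpf.operators.metadata.cyclic_mesh_expansion()
op.inputs.sector_meshed_region.connect(sector_mesh)  # Input[MeshedRegion | MeshesContainer]; placeholder variable
op.inputs.cyclic_support.connect(support)            # Input[CyclicSupport]; placeholder variable
expanded_mesh = op.outputs.meshed_region()           # Output[MeshedRegion]: expanded mesh
new_support = op.outputs.cyclic_support()            # Output[CyclicSupport]: support updated in place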
self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( cyclic_support_provider._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Streams containing the result file. @@ -276,7 +286,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. data sources containing the result file. @@ -297,7 +307,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def sector_meshed_region(self) -> Input: + def sector_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect sector_meshed_region input to the operator. mesh of the first sector. @@ -318,7 +328,7 @@ def sector_meshed_region(self) -> Input: return self._sector_meshed_region @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. if this pin is set, expanding the mesh is not necessary. @@ -339,7 +349,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label. @@ -375,17 +385,17 @@ class OutputsCyclicSupportProvider(_Outputs): def __init__(self, op: Operator): super().__init__(cyclic_support_provider._spec().outputs, op) - self._cyclic_support = Output( + self._cyclic_support: Output[CyclicSupport] = Output( cyclic_support_provider._spec().output_pin(0), 0, op ) self._outputs.append(self._cyclic_support) - self._sector_meshes = Output( + self._sector_meshes: Output[MeshesContainer] = Output( cyclic_support_provider._spec().output_pin(1), 1, op ) self._outputs.append(self._sector_meshes) @property - def cyclic_support(self) -> Output: + def cyclic_support(self) -> Output[CyclicSupport]: r"""Allows to get cyclic_support output of the operator Returns @@ -403,7 +413,7 @@ def cyclic_support(self) -> Output: return self._cyclic_support @property - def sector_meshes(self) -> Output: + def sector_meshes(self) -> Output[MeshesContainer]: r"""Allows to get sector_meshes output of the operator Returns diff --git a/src/ansys/dpf/core/operators/metadata/datasources_provider.py b/src/ansys/dpf/core/operators/metadata/datasources_provider.py index e888283c807..0db37aacb6f 100644 --- a/src/ansys/dpf/core/operators/metadata/datasources_provider.py +++ b/src/ansys/dpf/core/operators/metadata/datasources_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + class datasources_provider(Operator): r"""Creates a DataSources by expanding another. 
@@ -140,11 +144,13 @@ class InputsDatasourcesProvider(_Inputs): def __init__(self, op: Operator): super().__init__(datasources_provider._spec().inputs, op) - self._data_sources = Input(datasources_provider._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + datasources_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -177,11 +183,13 @@ class OutputsDatasourcesProvider(_Outputs): def __init__(self, op: Operator): super().__init__(datasources_provider._spec().outputs, op) - self._data_sources = Output(datasources_provider._spec().output_pin(0), 0, op) + self._data_sources: Output[DataSources] = Output( + datasources_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._data_sources) @property - def data_sources(self) -> Output: + def data_sources(self) -> Output[DataSources]: r"""Allows to get data_sources output of the operator Returns diff --git a/src/ansys/dpf/core/operators/metadata/element_types_provider.py b/src/ansys/dpf/core/operators/metadata/element_types_provider.py index efde8f71489..0849db7975e 100644 --- a/src/ansys/dpf/core/operators/metadata/element_types_provider.py +++ b/src/ansys/dpf/core/operators/metadata/element_types_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + class element_types_provider(Operator): r"""Reads element types data from the result files contained in the streams @@ -219,23 +224,25 @@ class InputsElementTypesProvider(_Inputs): def __init__(self, op: Operator): super().__init__(element_types_provider._spec().inputs, op) - self._solver_element_types_ids = Input( + self._solver_element_types_ids: Input[int] = Input( element_types_provider._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._solver_element_types_ids) - self._streams = Input(element_types_provider._spec().input_pin(3), 3, op, -1) + self._streams: Input[StreamsContainer] = Input( + element_types_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( element_types_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._output_type = Input( + self._output_type: Input[int] = Input( element_types_provider._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._output_type) @property - def solver_element_types_ids(self) -> Input: + def solver_element_types_ids(self) -> Input[int]: r"""Allows to connect solver_element_types_ids input to the operator. Element Type ids to recover used by the solver. If not set, all available element types are recovered. @@ -256,7 +263,7 @@ def solver_element_types_ids(self) -> Input: return self._solver_element_types_ids @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. Result file container allowed to be kept open to cache data. 
@@ -277,7 +284,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Result file path container, used if no streams are set. @@ -298,7 +305,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def output_type(self) -> Input: + def output_type(self) -> Input[int]: r"""Allows to connect output_type input to the operator. Get the output as a GenericDataContainer (pin value 1, default) or as a PropertyField (pin value 2). diff --git a/src/ansys/dpf/core/operators/metadata/integrate_over_time_freq.py b/src/ansys/dpf/core/operators/metadata/integrate_over_time_freq.py index 8c2f1178988..d263a005a66 100644 --- a/src/ansys/dpf/core/operators/metadata/integrate_over_time_freq.py +++ b/src/ansys/dpf/core/operators/metadata/integrate_over_time_freq.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class integrate_over_time_freq(Operator): r"""Integration of an input field over timefreq. @@ -172,17 +178,21 @@ class InputsIntegrateOverTimeFreq(_Inputs): def __init__(self, op: Operator): super().__init__(integrate_over_time_freq._spec().inputs, op) - self._field = Input(integrate_over_time_freq._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input( + integrate_over_time_freq._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._scoping = Input(integrate_over_time_freq._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + integrate_over_time_freq._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( integrate_over_time_freq._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._time_freq_support) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -201,7 +211,7 @@ def field(self) -> Input: return self._field @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Integrate the input field over a specific scoping. @@ -222,7 +232,7 @@ def scoping(self) -> Input: return self._scoping @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Time Freq to integrate on, otherwise time freq support from the input field is taken. 
@@ -257,11 +267,13 @@ class OutputsIntegrateOverTimeFreq(_Outputs): def __init__(self, op: Operator): super().__init__(integrate_over_time_freq._spec().outputs, op) - self._field = Output(integrate_over_time_freq._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + integrate_over_time_freq._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/metadata/is_cyclic.py b/src/ansys/dpf/core/operators/metadata/is_cyclic.py index a8a32820b99..d15a030a2cf 100644 --- a/src/ansys/dpf/core/operators/metadata/is_cyclic.py +++ b/src/ansys/dpf/core/operators/metadata/is_cyclic.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + class is_cyclic(Operator): r"""Reads if the model is cyclic from the result file. @@ -159,13 +164,17 @@ class InputsIsCyclic(_Inputs): def __init__(self, op: Operator): super().__init__(is_cyclic._spec().inputs, op) - self._streams_container = Input(is_cyclic._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + is_cyclic._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(is_cyclic._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + is_cyclic._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. streams (result file container) (optional) @@ -186,7 +195,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. If the stream is null, retrieves the file path from the data sources. 
@@ -221,11 +230,11 @@ class OutputsIsCyclic(_Outputs): def __init__(self, op: Operator): super().__init__(is_cyclic._spec().outputs, op) - self._file_path = Output(is_cyclic._spec().output_pin(0), 0, op) + self._file_path: Output[str] = Output(is_cyclic._spec().output_pin(0), 0, op) self._outputs.append(self._file_path) @property - def file_path(self) -> Output: + def file_path(self) -> Output[str]: r"""Allows to get file_path output of the operator returns 'single_stage' or 'multi_stage' or an empty string for non cyclic model diff --git a/src/ansys/dpf/core/operators/metadata/material_support_provider.py b/src/ansys/dpf/core/operators/metadata/material_support_provider.py index a184e409e22..3cd31e5182e 100644 --- a/src/ansys/dpf/core/operators/metadata/material_support_provider.py +++ b/src/ansys/dpf/core/operators/metadata/material_support_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + class material_support_provider(Operator): r"""Reads the material support. @@ -158,17 +163,17 @@ class InputsMaterialSupportProvider(_Inputs): def __init__(self, op: Operator): super().__init__(material_support_provider._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( material_support_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( material_support_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Streams result file container (optional). @@ -189,7 +194,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. if the stream is null, get the file path from the data sources. 
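Sketch of is_cyclic with the pins typed above; the result path is a placeholder:

from ansys.dpf import core as dpf

ds = dpf.DataSources(r"path/to/result.rst")  # placeholder result file path
op = dpf.operators.metadata.is_cyclic()
op.inputs.data_sources.connect(ds)           # Input[DataSources]
kind = op.outputs.file_path()                # Output[str]: 'single_stage', 'multi_stage', or '' for non-cyclic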
@@ -224,7 +229,7 @@ class OutputsMaterialSupportProvider(_Outputs): def __init__(self, op: Operator): super().__init__(material_support_provider._spec().outputs, op) - self._abstract_field_support = Output( + self._abstract_field_support: Output = Output( material_support_provider._spec().output_pin(0), 0, op ) self._outputs.append(self._abstract_field_support) diff --git a/src/ansys/dpf/core/operators/metadata/mesh_info_provider.py b/src/ansys/dpf/core/operators/metadata/mesh_info_provider.py index 1f0e381f405..42e643e0b4e 100644 --- a/src/ansys/dpf/core/operators/metadata/mesh_info_provider.py +++ b/src/ansys/dpf/core/operators/metadata/mesh_info_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.generic_data_container import GenericDataContainer + from ansys.dpf.core.streams_container import StreamsContainer + class mesh_info_provider(Operator): r"""Reads the mesh information, such as number of elements (common @@ -183,17 +189,21 @@ class InputsMeshInfoProvider(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_info_provider._spec().inputs, op) - self._time_scoping = Input(mesh_info_provider._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[int] = Input( + mesh_info_provider._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( mesh_info_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(mesh_info_provider._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + mesh_info_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[int]: r"""Allows to connect time_scoping input to the operator. Optional time/frequency set ID of the mesh. @@ -214,7 +224,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. streams (mesh file container) (optional) @@ -235,7 +245,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. If the stream is null, retrieves the file path from the data sources. 
@@ -270,11 +280,13 @@ class OutputsMeshInfoProvider(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_info_provider._spec().outputs, op) - self._mesh_info = Output(mesh_info_provider._spec().output_pin(0), 0, op) + self._mesh_info: Output[GenericDataContainer] = Output( + mesh_info_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh_info) @property - def mesh_info(self) -> Output: + def mesh_info(self) -> Output[GenericDataContainer]: r"""Allows to get mesh_info output of the operator Returns diff --git a/src/ansys/dpf/core/operators/metadata/mesh_property_provider.py b/src/ansys/dpf/core/operators/metadata/mesh_property_provider.py index 79b5d884ad9..eb5d8e619e0 100644 --- a/src/ansys/dpf/core/operators/metadata/mesh_property_provider.py +++ b/src/ansys/dpf/core/operators/metadata/mesh_property_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class mesh_property_provider(Operator): r"""Reads a property related to the mesh, defined by its name, by calling @@ -216,29 +222,29 @@ class InputsMeshPropertyProvider(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_property_provider._spec().inputs, op) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( mesh_property_provider._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( mesh_property_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( mesh_property_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._property_name = Input( + self._property_name: Input[str] = Input( mesh_property_provider._spec().input_pin(13), 13, op, -1 ) self._inputs.append(self._property_name) - self._property_identifier = Input( + self._property_identifier: Input[int | str] = Input( mesh_property_provider._spec().input_pin(17), 17, op, -1 ) self._inputs.append(self._property_identifier) @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Retrieves a property field on a subset of elements or nodes. @@ -259,7 +265,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. streams (result file container) (optional) @@ -280,7 +286,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. If the stream is null, retrieves the file path from the data sources. @@ -301,7 +307,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. 
Supported property names are: "mat", "named_selection", "named_selection_names", "apdl_element_type", "section", "elprops", "keyopt_1" to "keyopt_18". @@ -322,7 +328,7 @@ def property_name(self) -> Input: return self._property_name @property - def property_identifier(self) -> Input: + def property_identifier(self) -> Input[int | str]: r"""Allows to connect property_identifier input to the operator. Retrieves a property at a given index or by name. For example, a named selection's number or a named selection's name. diff --git a/src/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py b/src/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py index c64ee304aa8..d3f676912a1 100644 --- a/src/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py +++ b/src/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + class mesh_selection_manager_provider(Operator): r"""Reads mesh properties from the results files contained in the streams or @@ -166,17 +171,17 @@ class InputsMeshSelectionManagerProvider(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_selection_manager_provider._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( mesh_selection_manager_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( mesh_selection_manager_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. streams (result file container) (optional) @@ -197,7 +202,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. If the stream is null, retrieves the file path from the data sources. 
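Sketch for the mesh_property_provider pins typed above; its output pin is outside this excerpt, so the result is read through Operator.eval() (assumed to return the default output pin), and the data sources path is a placeholder:

from ansys.dpf import core as dpf

op = dpf.operators.metadata.mesh_property_provider()
op.inputs.data_sources.connect(dpf.DataSources(r"path/to/result.rst"))  # Input[DataSources]; placeholder path
op.inputs.property_name.connect("mat")   # Input[str]: one of the supported property names listed above
prop = op.eval()                         # evaluates the operator's default output pin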
@@ -232,7 +237,7 @@ class OutputsMeshSelectionManagerProvider(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_selection_manager_provider._spec().outputs, op) - self._mesh_selection_manager = Output( + self._mesh_selection_manager: Output = Output( mesh_selection_manager_provider._spec().output_pin(0), 0, op ) self._outputs.append(self._mesh_selection_manager) diff --git a/src/ansys/dpf/core/operators/metadata/mesh_support_provider.py b/src/ansys/dpf/core/operators/metadata/mesh_support_provider.py index 19d7e0c8dad..77336ed8aeb 100644 --- a/src/ansys/dpf/core/operators/metadata/mesh_support_provider.py +++ b/src/ansys/dpf/core/operators/metadata/mesh_support_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + class mesh_support_provider(Operator): r"""Reads the mesh support. @@ -158,17 +163,17 @@ class InputsMeshSupportProvider(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_support_provider._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( mesh_support_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( mesh_support_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Streams (result file container) (optional). @@ -189,7 +194,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. If the stream is null, retrieves the file path from the data sources. 
@@ -224,7 +229,7 @@ class OutputsMeshSupportProvider(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_support_provider._spec().outputs, op) - self._abstract_field_support = Output( + self._abstract_field_support: Output = Output( mesh_support_provider._spec().output_pin(0), 0, op ) self._outputs.append(self._abstract_field_support) diff --git a/src/ansys/dpf/core/operators/metadata/property_field_provider_by_name.py b/src/ansys/dpf/core/operators/metadata/property_field_provider_by_name.py index a624fc5e702..d7c4b55fb98 100644 --- a/src/ansys/dpf/core/operators/metadata/property_field_provider_by_name.py +++ b/src/ansys/dpf/core/operators/metadata/property_field_provider_by_name.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.property_field import PropertyField + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class property_field_provider_by_name(Operator): r"""Provides the property values for a set of elements for a defined @@ -201,25 +208,25 @@ class InputsPropertyFieldProviderByName(_Inputs): def __init__(self, op: Operator): super().__init__(property_field_provider_by_name._spec().inputs, op) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( property_field_provider_by_name._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( property_field_provider_by_name._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( property_field_provider_by_name._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._property_name = Input( + self._property_name: Input[str] = Input( property_field_provider_by_name._spec().input_pin(13), 13, op, -1 ) self._inputs.append(self._property_name) @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. scoping that defines the set of elements to fetch the property values for. If not specified, applied on all the elements of the mesh. @@ -240,7 +247,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. optional if using a dataSources @@ -261,7 +268,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. optional if using a streamsContainer @@ -282,7 +289,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. 
property to read, that can be the following: elements_connectivity, nodes_connectivity, material, element_type, apdl_section_id, apdl_real_id, apdl_esys_id, mapdl_element_type, mapdl_element_type_id, harmonic_index, step, substep, keyopt_i (i = 1 -> 18). @@ -317,13 +324,13 @@ class OutputsPropertyFieldProviderByName(_Outputs): def __init__(self, op: Operator): super().__init__(property_field_provider_by_name._spec().outputs, op) - self._property_field = Output( + self._property_field: Output[PropertyField] = Output( property_field_provider_by_name._spec().output_pin(0), 0, op ) self._outputs.append(self._property_field) @property - def property_field(self) -> Output: + def property_field(self) -> Output[PropertyField]: r"""Allows to get property_field output of the operator property field diff --git a/src/ansys/dpf/core/operators/metadata/real_constants_provider.py b/src/ansys/dpf/core/operators/metadata/real_constants_provider.py index 79c8b375bb1..c6166d7035b 100644 --- a/src/ansys/dpf/core/operators/metadata/real_constants_provider.py +++ b/src/ansys/dpf/core/operators/metadata/real_constants_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.streams_container import StreamsContainer + class real_constants_provider(Operator): r"""Reads real constants from the result files contained in the streams or @@ -180,19 +186,21 @@ class InputsRealConstantsProvider(_Inputs): def __init__(self, op: Operator): super().__init__(real_constants_provider._spec().inputs, op) - self._solver_real_constants_ids = Input( + self._solver_real_constants_ids: Input[int] = Input( real_constants_provider._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._solver_real_constants_ids) - self._streams = Input(real_constants_provider._spec().input_pin(3), 3, op, -1) + self._streams: Input[StreamsContainer] = Input( + real_constants_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( real_constants_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def solver_real_constants_ids(self) -> Input: + def solver_real_constants_ids(self) -> Input[int]: r"""Allows to connect solver_real_constants_ids input to the operator. Real Constant ids to recover used by the solver. If not set, all available real constants to be recovered. @@ -213,7 +221,7 @@ def solver_real_constants_ids(self) -> Input: return self._solver_real_constants_ids @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. Result file container allowed to be kept open to cache data. @@ -234,7 +242,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Result file path container, used if no streams are set. 
@@ -269,13 +277,13 @@ class OutputsRealConstantsProvider(_Outputs): def __init__(self, op: Operator): super().__init__(real_constants_provider._spec().outputs, op) - self._real_constants = Output( + self._real_constants: Output[Field] = Output( real_constants_provider._spec().output_pin(0), 0, op ) self._outputs.append(self._real_constants) @property - def real_constants(self) -> Output: + def real_constants(self) -> Output[Field]: r"""Allows to get real_constants output of the operator Returns diff --git a/src/ansys/dpf/core/operators/metadata/result_info_provider.py b/src/ansys/dpf/core/operators/metadata/result_info_provider.py index c3714c8453a..13d1a28abd4 100644 --- a/src/ansys/dpf/core/operators/metadata/result_info_provider.py +++ b/src/ansys/dpf/core/operators/metadata/result_info_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.result_info import ResultInfo + from ansys.dpf.core.streams_container import StreamsContainer + class result_info_provider(Operator): r"""Reads the result information, such as available results or unit systems @@ -160,15 +166,17 @@ class InputsResultInfoProvider(_Inputs): def __init__(self, op: Operator): super().__init__(result_info_provider._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( result_info_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(result_info_provider._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + result_info_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. streams (result file container) (optional) @@ -189,7 +197,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. If the stream is null, retrieves the file path from the data sources. 
@@ -224,11 +232,13 @@ class OutputsResultInfoProvider(_Outputs): def __init__(self, op: Operator): super().__init__(result_info_provider._spec().outputs, op) - self._result_info = Output(result_info_provider._spec().output_pin(0), 0, op) + self._result_info: Output[ResultInfo] = Output( + result_info_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._result_info) @property - def result_info(self) -> Output: + def result_info(self) -> Output[ResultInfo]: r"""Allows to get result_info output of the operator Returns diff --git a/src/ansys/dpf/core/operators/metadata/streams_provider.py b/src/ansys/dpf/core/operators/metadata/streams_provider.py index b9273ab7361..875ef07dcdb 100644 --- a/src/ansys/dpf/core/operators/metadata/streams_provider.py +++ b/src/ansys/dpf/core/operators/metadata/streams_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + class streams_provider(Operator): r"""Creates streams (files with cache) from the data sources. @@ -140,11 +145,13 @@ class InputsStreamsProvider(_Inputs): def __init__(self, op: Operator): super().__init__(streams_provider._spec().inputs, op) - self._data_sources = Input(streams_provider._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + streams_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
Returns @@ -177,11 +184,13 @@ class OutputsStreamsProvider(_Outputs): def __init__(self, op: Operator): super().__init__(streams_provider._spec().outputs, op) - self._streams_container = Output(streams_provider._spec().output_pin(0), 0, op) + self._streams_container: Output[StreamsContainer] = Output( + streams_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._streams_container) @property - def streams_container(self) -> Output: + def streams_container(self) -> Output[StreamsContainer]: r"""Allows to get streams_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/metadata/time_freq_provider.py b/src/ansys/dpf/core/operators/metadata/time_freq_provider.py index fe3b6783295..c79c1877c23 100644 --- a/src/ansys/dpf/core/operators/metadata/time_freq_provider.py +++ b/src/ansys/dpf/core/operators/metadata/time_freq_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class time_freq_provider(Operator): r"""Reads the time/frequency support from the results files contained in the @@ -162,15 +168,17 @@ class InputsTimeFreqProvider(_Inputs): def __init__(self, op: Operator): super().__init__(time_freq_provider._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( time_freq_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(time_freq_provider._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + time_freq_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. streams (result file container) (optional) @@ -191,7 +199,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. If the stream is null, retrieves the file path from the data sources. 
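With the annotations above, a static checker should be able to flag, for example, passing a plain str where Input[DataSources] is expected; a usage sketch for streams_provider with a placeholder path:

from ansys.dpf import core as dpf

op = dpf.operators.metadata.streams_provider()
op.inputs.data_sources.connect(dpf.DataSources(r"path/to/result.rst"))  # Input[DataSources]; placeholder path
sc = op.outputs.streams_container()                                     # Output[StreamsContainer]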
@@ -226,13 +234,13 @@ class OutputsTimeFreqProvider(_Outputs): def __init__(self, op: Operator): super().__init__(time_freq_provider._spec().outputs, op) - self._time_freq_support = Output( + self._time_freq_support: Output[TimeFreqSupport] = Output( time_freq_provider._spec().output_pin(0), 0, op ) self._outputs.append(self._time_freq_support) @property - def time_freq_support(self) -> Output: + def time_freq_support(self) -> Output[TimeFreqSupport]: r"""Allows to get time_freq_support output of the operator Returns diff --git a/src/ansys/dpf/core/operators/metadata/time_freq_support_get_attribute.py b/src/ansys/dpf/core/operators/metadata/time_freq_support_get_attribute.py index 9cc7f319612..45d6121d646 100644 --- a/src/ansys/dpf/core/operators/metadata/time_freq_support_get_attribute.py +++ b/src/ansys/dpf/core/operators/metadata/time_freq_support_get_attribute.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class time_freq_support_get_attribute(Operator): r"""Uses the TimeFreqSupport APIs to return a given attribute of the scoping @@ -201,25 +206,25 @@ class InputsTimeFreqSupportGetAttribute(_Inputs): def __init__(self, op: Operator): super().__init__(time_freq_support_get_attribute._spec().inputs, op) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( time_freq_support_get_attribute._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_support) - self._property_name = Input( + self._property_name: Input[str] = Input( time_freq_support_get_attribute._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._property_name) - self._property_identifier = Input( + self._property_identifier: Input[int | Scoping] = Input( time_freq_support_get_attribute._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._property_identifier) - self._property_identifier_2 = Input( + self._property_identifier_2: Input[int] = Input( time_freq_support_get_attribute._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._property_identifier_2) @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Returns @@ -238,7 +243,7 @@ def time_freq_support(self) -> Input: return self._time_freq_support @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. Supported property names are: "time_freqs", "imaginary_freqs", "frequency_tolerance", "set_id", "cummulative_index", "sets_freqs", "step_id_from_harmonic_index". @@ -259,7 +264,7 @@ def property_name(self) -> Input: return self._property_name @property - def property_identifier(self) -> Input: + def property_identifier(self) -> Input[int | Scoping]: r"""Allows to connect property_identifier input to the operator. Additional pin for properties "set_id" and "cummulative_index": the step id, for "sets_freqs": the sets scoping, for "step_id_from_harmonic_index" : the harmonic index. 
@@ -280,7 +285,7 @@ def property_identifier(self) -> Input: return self._property_identifier @property - def property_identifier_2(self) -> Input: + def property_identifier_2(self) -> Input[int]: r"""Allows to connect property_identifier_2 input to the operator. Additional pin for properties "set_id" and "cummulative_index": the substep id (if none, last substep is considered). diff --git a/src/ansys/dpf/core/operators/min_max/max_by_component.py b/src/ansys/dpf/core/operators/min_max/max_by_component.py index 66f129ba537..6fad14945a8 100644 --- a/src/ansys/dpf/core/operators/min_max/max_by_component.py +++ b/src/ansys/dpf/core/operators/min_max/max_by_component.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class max_by_component(Operator): r"""Give the maximum for each element rank by comparing several fields. @@ -178,17 +183,21 @@ class InputsMaxByComponent(_Inputs): def __init__(self, op: Operator): super().__init__(max_by_component._spec().inputs, op) - self._use_absolute_value = Input( + self._use_absolute_value: Input[bool] = Input( max_by_component._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._use_absolute_value) - self._field1 = Input(max_by_component._spec().input_pin(1), 1, op, 0) + self._field1: Input[Field | FieldsContainer] = Input( + max_by_component._spec().input_pin(1), 1, op, 0 + ) self._inputs.append(self._field1) - self._field2 = Input(max_by_component._spec().input_pin(2), 2, op, 1) + self._field2: Input[Field | FieldsContainer] = Input( + max_by_component._spec().input_pin(2), 2, op, 1 + ) self._inputs.append(self._field2) @property - def use_absolute_value(self) -> Input: + def use_absolute_value(self) -> Input[bool]: r"""Allows to connect use_absolute_value input to the operator. use_absolute_value @@ -209,7 +218,7 @@ def use_absolute_value(self) -> Input: return self._use_absolute_value @property - def field1(self) -> Input: + def field1(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field1 input to the operator. field or fields container with only one field is expected @@ -230,7 +239,7 @@ def field1(self) -> Input: return self._field1 @property - def field2(self) -> Input: + def field2(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field2 input to the operator. 
field or fields container with only one field is expected @@ -265,11 +274,13 @@ class OutputsMaxByComponent(_Outputs): def __init__(self, op: Operator): super().__init__(max_by_component._spec().outputs, op) - self._field = Output(max_by_component._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + max_by_component._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/max_over_phase.py b/src/ansys/dpf/core/operators/min_max/max_over_phase.py index 351faea68a7..9b0c583c3d2 100644 --- a/src/ansys/dpf/core/operators/min_max/max_over_phase.py +++ b/src/ansys/dpf/core/operators/min_max/max_over_phase.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class max_over_phase(Operator): r"""Returns, for each entity, the maximum value of (real value \* cos(theta) @@ -196,17 +200,25 @@ class InputsMaxOverPhase(_Inputs): def __init__(self, op: Operator): super().__init__(max_over_phase._spec().inputs, op) - self._real_field = Input(max_over_phase._spec().input_pin(0), 0, op, -1) + self._real_field: Input[Field] = Input( + max_over_phase._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._real_field) - self._imaginary_field = Input(max_over_phase._spec().input_pin(1), 1, op, -1) + self._imaginary_field: Input[Field] = Input( + max_over_phase._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._imaginary_field) - self._abs_value = Input(max_over_phase._spec().input_pin(2), 2, op, -1) + self._abs_value: Input[bool] = Input( + max_over_phase._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._abs_value) - self._phase_increment = Input(max_over_phase._spec().input_pin(3), 3, op, -1) + self._phase_increment: Input[float] = Input( + max_over_phase._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._phase_increment) @property - def real_field(self) -> Input: + def real_field(self) -> Input[Field]: r"""Allows to connect real_field input to the operator. Returns @@ -225,7 +237,7 @@ def real_field(self) -> Input: return self._real_field @property - def imaginary_field(self) -> Input: + def imaginary_field(self) -> Input[Field]: r"""Allows to connect imaginary_field input to the operator. Returns @@ -244,7 +256,7 @@ def imaginary_field(self) -> Input: return self._imaginary_field @property - def abs_value(self) -> Input: + def abs_value(self) -> Input[bool]: r"""Allows to connect abs_value input to the operator. Should use absolute value. @@ -265,7 +277,7 @@ def abs_value(self) -> Input: return self._abs_value @property - def phase_increment(self) -> Input: + def phase_increment(self) -> Input[float]: r"""Allows to connect phase_increment input to the operator. Phase increment (default is 10.0 degrees). 
@@ -300,11 +312,11 @@ class OutputsMaxOverPhase(_Outputs): def __init__(self, op: Operator): super().__init__(max_over_phase._spec().outputs, op) - self._field = Output(max_over_phase._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(max_over_phase._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py b/src/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py index ef17502bd3b..9dc29047cd8 100644 --- a/src/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py +++ b/src/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class max_over_time_by_entity(Operator): r"""Evaluates maximum over time/frequency. @@ -177,19 +181,21 @@ class InputsMaxOverTimeByEntity(_Inputs): def __init__(self, op: Operator): super().__init__(max_over_time_by_entity._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( max_over_time_by_entity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._abs_value = Input(max_over_time_by_entity._spec().input_pin(3), 3, op, -1) + self._abs_value: Input[bool] = Input( + max_over_time_by_entity._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._abs_value) - self._compute_amplitude = Input( + self._compute_amplitude: Input[bool] = Input( max_over_time_by_entity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._compute_amplitude) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -208,7 +214,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def abs_value(self) -> Input: + def abs_value(self) -> Input[bool]: r"""Allows to connect abs_value input to the operator. Should use absolute value. @@ -229,7 +235,7 @@ def abs_value(self) -> Input: return self._abs_value @property - def compute_amplitude(self) -> Input: + def compute_amplitude(self) -> Input[bool]: r"""Allows to connect compute_amplitude input to the operator. Do calculate amplitude. 
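Sketch for max_over_phase as typed above, assuming real_f and imag_f are existing Field objects (for instance the real and imaginary parts of a harmonic result):

from ansys.dpf import core as dpf

op = dpf.operators.min_max.max_over_phase()
op.inputs.real_field.connect(real_f)        # Input[Field]; placeholder variable
op.inputs.imaginary_field.connect(imag_f)   # Input[Field]; placeholder variable
op.inputs.phase_increment.connect(5.0)      # Input[float]: phase increment in degrees, default 10.0
max_field = op.outputs.field()              # Output[Field]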
@@ -264,13 +270,13 @@ class OutputsMaxOverTimeByEntity(_Outputs): def __init__(self, op: Operator): super().__init__(max_over_time_by_entity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( max_over_time_by_entity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/min_by_component.py b/src/ansys/dpf/core/operators/min_max/min_by_component.py index a29e79c6066..80a89ea0d9a 100644 --- a/src/ansys/dpf/core/operators/min_max/min_by_component.py +++ b/src/ansys/dpf/core/operators/min_max/min_by_component.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class min_by_component(Operator): r"""Give the minimum for each element rank by comparing several fields. @@ -178,17 +183,21 @@ class InputsMinByComponent(_Inputs): def __init__(self, op: Operator): super().__init__(min_by_component._spec().inputs, op) - self._use_absolute_value = Input( + self._use_absolute_value: Input[bool] = Input( min_by_component._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._use_absolute_value) - self._field1 = Input(min_by_component._spec().input_pin(1), 1, op, 0) + self._field1: Input[Field | FieldsContainer] = Input( + min_by_component._spec().input_pin(1), 1, op, 0 + ) self._inputs.append(self._field1) - self._field2 = Input(min_by_component._spec().input_pin(2), 2, op, 1) + self._field2: Input[Field | FieldsContainer] = Input( + min_by_component._spec().input_pin(2), 2, op, 1 + ) self._inputs.append(self._field2) @property - def use_absolute_value(self) -> Input: + def use_absolute_value(self) -> Input[bool]: r"""Allows to connect use_absolute_value input to the operator. use_absolute_value @@ -209,7 +218,7 @@ def use_absolute_value(self) -> Input: return self._use_absolute_value @property - def field1(self) -> Input: + def field1(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field1 input to the operator. field or fields container with only one field is expected @@ -230,7 +239,7 @@ def field1(self) -> Input: return self._field1 @property - def field2(self) -> Input: + def field2(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field2 input to the operator. 
field or fields container with only one field is expected @@ -265,11 +274,13 @@ class OutputsMinByComponent(_Outputs): def __init__(self, op: Operator): super().__init__(min_by_component._spec().outputs, op) - self._field = Output(min_by_component._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + min_by_component._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/min_max.py b/src/ansys/dpf/core/operators/min_max/min_max.py index fe70266c0a1..9df222b461c 100644 --- a/src/ansys/dpf/core/operators/min_max/min_max.py +++ b/src/ansys/dpf/core/operators/min_max/min_max.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class min_max(Operator): r"""Compute the component-wise minimum (out 0) and maximum (out 1) over a @@ -151,11 +156,13 @@ class InputsMinMax(_Inputs): def __init__(self, op: Operator): super().__init__(min_max._spec().inputs, op) - self._field = Input(min_max._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + min_max._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -191,13 +198,13 @@ class OutputsMinMax(_Outputs): def __init__(self, op: Operator): super().__init__(min_max._spec().outputs, op) - self._field_min = Output(min_max._spec().output_pin(0), 0, op) + self._field_min: Output[Field] = Output(min_max._spec().output_pin(0), 0, op) self._outputs.append(self._field_min) - self._field_max = Output(min_max._spec().output_pin(1), 1, op) + self._field_max: Output[Field] = Output(min_max._spec().output_pin(1), 1, op) self._outputs.append(self._field_max) @property - def field_min(self) -> Output: + def field_min(self) -> Output[Field]: r"""Allows to get field_min output of the operator Returns @@ -215,7 +222,7 @@ def field_min(self) -> Output: return self._field_min @property - def field_max(self) -> Output: + def field_max(self) -> Output[Field]: r"""Allows to get field_max output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/min_max_by_entity.py b/src/ansys/dpf/core/operators/min_max/min_max_by_entity.py index 9d3dc166b74..a45092330df 100644 --- a/src/ansys/dpf/core/operators/min_max/min_max_by_entity.py +++ b/src/ansys/dpf/core/operators/min_max/min_max_by_entity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class min_max_by_entity(Operator): r"""Compute the entity-wise minimum (out 0) and maximum (out 1) through all @@ -150,13 +155,13 @@ class InputsMinMaxByEntity(_Inputs): def 
__init__(self, op: Operator): super().__init__(min_max_by_entity._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( min_max_by_entity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -190,13 +195,17 @@ class OutputsMinMaxByEntity(_Outputs): def __init__(self, op: Operator): super().__init__(min_max_by_entity._spec().outputs, op) - self._field_min = Output(min_max_by_entity._spec().output_pin(0), 0, op) + self._field_min: Output[Field] = Output( + min_max_by_entity._spec().output_pin(0), 0, op + ) self._outputs.append(self._field_min) - self._field_max = Output(min_max_by_entity._spec().output_pin(1), 1, op) + self._field_max: Output[Field] = Output( + min_max_by_entity._spec().output_pin(1), 1, op + ) self._outputs.append(self._field_max) @property - def field_min(self) -> Output: + def field_min(self) -> Output[Field]: r"""Allows to get field_min output of the operator Returns @@ -214,7 +223,7 @@ def field_min(self) -> Output: return self._field_min @property - def field_max(self) -> Output: + def field_max(self) -> Output[Field]: r"""Allows to get field_max output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/min_max_by_time.py b/src/ansys/dpf/core/operators/min_max/min_max_by_time.py index fd6f7175a3f..7d2e5e32574 100644 --- a/src/ansys/dpf/core/operators/min_max/min_max_by_time.py +++ b/src/ansys/dpf/core/operators/min_max/min_max_by_time.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class min_max_by_time(Operator): r"""Evaluates minimum, maximum by time or frequency over all the entities of @@ -171,15 +175,17 @@ class InputsMinMaxByTime(_Inputs): def __init__(self, op: Operator): super().__init__(min_max_by_time._spec().inputs, op) - self._fields_container = Input(min_max_by_time._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + min_max_by_time._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._compute_absolute_value = Input( + self._compute_absolute_value: Input[bool] = Input( min_max_by_time._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._compute_absolute_value) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -198,7 +204,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def compute_absolute_value(self) -> Input: + def compute_absolute_value(self) -> Input[bool]: r"""Allows to connect compute_absolute_value input to the operator. Calculate the absolute value of field entities before computing the min/max. 
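# A chaining sketch: the Output[FieldsContainer] pins declared in the next hunk
# line up with Input[FieldsContainer] pins elsewhere in this diff (min_max_fc),
# so output-to-input connections carry a concrete type end to end. upstream_fc
# is a placeholder for a FieldsContainer read from a result file.
from ansys.dpf.core import operators as ops

by_time = ops.min_max.min_max_by_time()
# by_time.inputs.fields_container.connect(upstream_fc)            # Input[FieldsContainer]
by_time.inputs.compute_absolute_value.connect(True)               # Input[bool]

overall_min = ops.min_max.min_max_fc()
overall_min.inputs.fields_container.connect(by_time.outputs.min)  # FieldsContainer on both ends
# smallest = overall_min.outputs.field_min()                      # Output[Field]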
@@ -234,13 +240,17 @@ class OutputsMinMaxByTime(_Outputs): def __init__(self, op: Operator): super().__init__(min_max_by_time._spec().outputs, op) - self._min = Output(min_max_by_time._spec().output_pin(0), 0, op) + self._min: Output[FieldsContainer] = Output( + min_max_by_time._spec().output_pin(0), 0, op + ) self._outputs.append(self._min) - self._max = Output(min_max_by_time._spec().output_pin(1), 1, op) + self._max: Output[FieldsContainer] = Output( + min_max_by_time._spec().output_pin(1), 1, op + ) self._outputs.append(self._max) @property - def min(self) -> Output: + def min(self) -> Output[FieldsContainer]: r"""Allows to get min output of the operator Returns @@ -258,7 +268,7 @@ def min(self) -> Output: return self._min @property - def max(self) -> Output: + def max(self) -> Output[FieldsContainer]: r"""Allows to get max output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/min_max_fc.py b/src/ansys/dpf/core/operators/min_max/min_max_fc.py index 51b3d5a49d2..3aaa7215b73 100644 --- a/src/ansys/dpf/core/operators/min_max/min_max_fc.py +++ b/src/ansys/dpf/core/operators/min_max/min_max_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class min_max_fc(Operator): r"""Compute the component-wise minimum (out 0) and maximum (out 1) over a @@ -150,11 +155,13 @@ class InputsMinMaxFc(_Inputs): def __init__(self, op: Operator): super().__init__(min_max_fc._spec().inputs, op) - self._fields_container = Input(min_max_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + min_max_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -188,13 +195,13 @@ class OutputsMinMaxFc(_Outputs): def __init__(self, op: Operator): super().__init__(min_max_fc._spec().outputs, op) - self._field_min = Output(min_max_fc._spec().output_pin(0), 0, op) + self._field_min: Output[Field] = Output(min_max_fc._spec().output_pin(0), 0, op) self._outputs.append(self._field_min) - self._field_max = Output(min_max_fc._spec().output_pin(1), 1, op) + self._field_max: Output[Field] = Output(min_max_fc._spec().output_pin(1), 1, op) self._outputs.append(self._field_max) @property - def field_min(self) -> Output: + def field_min(self) -> Output[Field]: r"""Allows to get field_min output of the operator Returns @@ -212,7 +219,7 @@ def field_min(self) -> Output: return self._field_min @property - def field_max(self) -> Output: + def field_max(self) -> Output[Field]: r"""Allows to get field_max output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/min_max_fc_inc.py b/src/ansys/dpf/core/operators/min_max/min_max_fc_inc.py index 9385a0b1eb9..0160586d58c 100644 --- a/src/ansys/dpf/core/operators/min_max/min_max_fc_inc.py +++ b/src/ansys/dpf/core/operators/min_max/min_max_fc_inc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class min_max_fc_inc(Operator): r"""Compute the component-wise minimum (out 0) and maximum (out 1) over a @@ -150,11 +155,13 @@ class InputsMinMaxFcInc(_Inputs): def __init__(self, op: Operator): super().__init__(min_max_fc_inc._spec().inputs, op) - self._fields_container = Input(min_max_fc_inc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + min_max_fc_inc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Returns @@ -188,13 +195,17 @@ class OutputsMinMaxFcInc(_Outputs): def __init__(self, op: Operator): super().__init__(min_max_fc_inc._spec().outputs, op) - self._field_min = Output(min_max_fc_inc._spec().output_pin(0), 0, op) + self._field_min: Output[Field] = Output( + min_max_fc_inc._spec().output_pin(0), 0, op + ) self._outputs.append(self._field_min) - self._field_max = Output(min_max_fc_inc._spec().output_pin(1), 1, op) + self._field_max: Output[Field] = Output( + min_max_fc_inc._spec().output_pin(1), 1, op + ) self._outputs.append(self._field_max) @property - def field_min(self) -> Output: + def field_min(self) -> Output[Field]: r"""Allows to get field_min output of the operator Returns @@ -212,7 +223,7 @@ def field_min(self) -> Output: return self._field_min @property - def field_max(self) -> Output: + def field_max(self) -> Output[Field]: r"""Allows to get field_max output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/min_max_inc.py b/src/ansys/dpf/core/operators/min_max/min_max_inc.py index 42acccf0c36..1e0a0a92173 100644 --- a/src/ansys/dpf/core/operators/min_max/min_max_inc.py +++ b/src/ansys/dpf/core/operators/min_max/min_max_inc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + class min_max_inc(Operator): r"""Compute the component-wise minimum (out 0) and maximum (out 1) over @@ -180,13 +185,15 @@ class InputsMinMaxInc(_Inputs): def __init__(self, op: Operator): super().__init__(min_max_inc._spec().inputs, op) - self._field = Input(min_max_inc._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input(min_max_inc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._domain_id = Input(min_max_inc._spec().input_pin(17), 17, op, -1) + self._domain_id: Input[int] = Input( + min_max_inc._spec().input_pin(17), 17, op, -1 + ) self._inputs.append(self._domain_id) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -205,7 +212,7 @@ def field(self) -> Input: return self._field @property - def domain_id(self) -> Input: + def domain_id(self) -> Input[int]: r"""Allows to connect domain_id input to the operator. 
Returns @@ -241,17 +248,25 @@ class OutputsMinMaxInc(_Outputs): def __init__(self, op: Operator): super().__init__(min_max_inc._spec().outputs, op) - self._field_min = Output(min_max_inc._spec().output_pin(0), 0, op) + self._field_min: Output[Field] = Output( + min_max_inc._spec().output_pin(0), 0, op + ) self._outputs.append(self._field_min) - self._field_max = Output(min_max_inc._spec().output_pin(1), 1, op) + self._field_max: Output[Field] = Output( + min_max_inc._spec().output_pin(1), 1, op + ) self._outputs.append(self._field_max) - self._domain_ids_min = Output(min_max_inc._spec().output_pin(2), 2, op) + self._domain_ids_min: Output[Scoping] = Output( + min_max_inc._spec().output_pin(2), 2, op + ) self._outputs.append(self._domain_ids_min) - self._domain_ids_max = Output(min_max_inc._spec().output_pin(3), 3, op) + self._domain_ids_max: Output[Scoping] = Output( + min_max_inc._spec().output_pin(3), 3, op + ) self._outputs.append(self._domain_ids_max) @property - def field_min(self) -> Output: + def field_min(self) -> Output[Field]: r"""Allows to get field_min output of the operator Returns @@ -269,7 +284,7 @@ def field_min(self) -> Output: return self._field_min @property - def field_max(self) -> Output: + def field_max(self) -> Output[Field]: r"""Allows to get field_max output of the operator Returns @@ -287,7 +302,7 @@ def field_max(self) -> Output: return self._field_max @property - def domain_ids_min(self) -> Output: + def domain_ids_min(self) -> Output[Scoping]: r"""Allows to get domain_ids_min output of the operator Returns @@ -305,7 +320,7 @@ def domain_ids_min(self) -> Output: return self._domain_ids_min @property - def domain_ids_max(self) -> Output: + def domain_ids_max(self) -> Output[Scoping]: r"""Allows to get domain_ids_max output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py b/src/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py index 479d0d49ea8..bbc296689e1 100644 --- a/src/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py +++ b/src/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class min_max_over_label_fc(Operator): r"""Create two fields (0 min 1 max) by looping over the fields container in @@ -213,15 +219,17 @@ class InputsMinMaxOverLabelFc(_Inputs): def __init__(self, op: Operator): super().__init__(min_max_over_label_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( min_max_over_label_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._label = Input(min_max_over_label_fc._spec().input_pin(1), 1, op, -1) + self._label: Input[str] = Input( + min_max_over_label_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._label) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
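# Sketch for min_max_over_label_fc (pins in this and the following hunks): the
# label pin is Input[str], and alongside the Output[Field] extrema the operator
# exposes several Output[Scoping] id pins. upstream_fc is a placeholder
# FieldsContainer and "time" a placeholder label name.
from ansys.dpf.core import operators as ops

op = ops.min_max.min_max_over_label_fc()
# op.inputs.fields_container.connect(upstream_fc)   # Input[FieldsContainer]
op.inputs.label.connect("time")                     # Input[str]
# ids_min = op.outputs.scoping_ids_min()            # Output[Scoping]
# f_max = op.outputs.field_max()                    # Output[Field]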
Returns @@ -240,7 +248,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def label(self) -> Input: + def label(self) -> Input[str]: r"""Allows to connect label input to the operator. label name from the fields container @@ -280,29 +288,33 @@ class OutputsMinMaxOverLabelFc(_Outputs): def __init__(self, op: Operator): super().__init__(min_max_over_label_fc._spec().outputs, op) - self._field_min = Output(min_max_over_label_fc._spec().output_pin(0), 0, op) + self._field_min: Output[Field] = Output( + min_max_over_label_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._field_min) - self._field_max = Output(min_max_over_label_fc._spec().output_pin(1), 1, op) + self._field_max: Output[Field] = Output( + min_max_over_label_fc._spec().output_pin(1), 1, op + ) self._outputs.append(self._field_max) - self._domain_ids_min = Output( + self._domain_ids_min: Output[Scoping] = Output( min_max_over_label_fc._spec().output_pin(2), 2, op ) self._outputs.append(self._domain_ids_min) - self._domain_ids_max = Output( + self._domain_ids_max: Output[Scoping] = Output( min_max_over_label_fc._spec().output_pin(3), 3, op ) self._outputs.append(self._domain_ids_max) - self._scoping_ids_min = Output( + self._scoping_ids_min: Output[Scoping] = Output( min_max_over_label_fc._spec().output_pin(4), 4, op ) self._outputs.append(self._scoping_ids_min) - self._scoping_ids_max = Output( + self._scoping_ids_max: Output[Scoping] = Output( min_max_over_label_fc._spec().output_pin(5), 5, op ) self._outputs.append(self._scoping_ids_max) @property - def field_min(self) -> Output: + def field_min(self) -> Output[Field]: r"""Allows to get field_min output of the operator Returns @@ -320,7 +332,7 @@ def field_min(self) -> Output: return self._field_min @property - def field_max(self) -> Output: + def field_max(self) -> Output[Field]: r"""Allows to get field_max output of the operator Returns @@ -338,7 +350,7 @@ def field_max(self) -> Output: return self._field_max @property - def domain_ids_min(self) -> Output: + def domain_ids_min(self) -> Output[Scoping]: r"""Allows to get domain_ids_min output of the operator Returns @@ -356,7 +368,7 @@ def domain_ids_min(self) -> Output: return self._domain_ids_min @property - def domain_ids_max(self) -> Output: + def domain_ids_max(self) -> Output[Scoping]: r"""Allows to get domain_ids_max output of the operator Returns @@ -374,7 +386,7 @@ def domain_ids_max(self) -> Output: return self._domain_ids_max @property - def scoping_ids_min(self) -> Output: + def scoping_ids_min(self) -> Output[Scoping]: r"""Allows to get scoping_ids_min output of the operator Returns @@ -392,7 +404,7 @@ def scoping_ids_min(self) -> Output: return self._scoping_ids_min @property - def scoping_ids_max(self) -> Output: + def scoping_ids_max(self) -> Output[Scoping]: r"""Allows to get scoping_ids_max output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py b/src/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py index d7231021f18..6c36e2f3e42 100644 --- a/src/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py +++ b/src/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from 
ansys.dpf.core.fields_container import FieldsContainer + class min_max_over_time_by_entity(Operator): r"""| For each entity and component, evaluates minimum and maximum over @@ -217,21 +221,21 @@ class InputsMinMaxOverTimeByEntity(_Inputs): def __init__(self, op: Operator): super().__init__(min_max_over_time_by_entity._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( min_max_over_time_by_entity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._compute_absolute_value = Input( + self._compute_absolute_value: Input[bool] = Input( min_max_over_time_by_entity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._compute_absolute_value) - self._compute_amplitude = Input( + self._compute_amplitude: Input[bool] = Input( min_max_over_time_by_entity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._compute_amplitude) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -250,7 +254,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def compute_absolute_value(self) -> Input: + def compute_absolute_value(self) -> Input[bool]: r"""Allows to connect compute_absolute_value input to the operator. Calculate the absolute value of field entities before computing the min/max. @@ -271,7 +275,7 @@ def compute_absolute_value(self) -> Input: return self._compute_absolute_value @property - def compute_amplitude(self) -> Input: + def compute_amplitude(self) -> Input[bool]: r"""Allows to connect compute_amplitude input to the operator. Do calculate amplitude. @@ -309,21 +313,25 @@ class OutputsMinMaxOverTimeByEntity(_Outputs): def __init__(self, op: Operator): super().__init__(min_max_over_time_by_entity._spec().outputs, op) - self._min = Output(min_max_over_time_by_entity._spec().output_pin(0), 0, op) + self._min: Output[FieldsContainer] = Output( + min_max_over_time_by_entity._spec().output_pin(0), 0, op + ) self._outputs.append(self._min) - self._max = Output(min_max_over_time_by_entity._spec().output_pin(1), 1, op) + self._max: Output[FieldsContainer] = Output( + min_max_over_time_by_entity._spec().output_pin(1), 1, op + ) self._outputs.append(self._max) - self._time_freq_of_min = Output( + self._time_freq_of_min: Output[FieldsContainer] = Output( min_max_over_time_by_entity._spec().output_pin(2), 2, op ) self._outputs.append(self._time_freq_of_min) - self._time_freq_of_max = Output( + self._time_freq_of_max: Output[FieldsContainer] = Output( min_max_over_time_by_entity._spec().output_pin(3), 3, op ) self._outputs.append(self._time_freq_of_max) @property - def min(self) -> Output: + def min(self) -> Output[FieldsContainer]: r"""Allows to get min output of the operator Returns @@ -341,7 +349,7 @@ def min(self) -> Output: return self._min @property - def max(self) -> Output: + def max(self) -> Output[FieldsContainer]: r"""Allows to get max output of the operator Returns @@ -359,7 +367,7 @@ def max(self) -> Output: return self._max @property - def time_freq_of_min(self) -> Output: + def time_freq_of_min(self) -> Output[FieldsContainer]: r"""Allows to get time_freq_of_min output of the operator Returns @@ -377,7 +385,7 @@ def time_freq_of_min(self) -> Output: return self._time_freq_of_min @property - def time_freq_of_max(self) -> Output: + def time_freq_of_max(self) -> Output[FieldsContainer]: r"""Allows to get time_freq_of_max output of the operator 
Returns diff --git a/src/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py b/src/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py index 596c5bb792d..519ee615bee 100644 --- a/src/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py +++ b/src/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class min_over_time_by_entity(Operator): r"""Evaluates minimum over time/frequency. @@ -177,19 +181,21 @@ class InputsMinOverTimeByEntity(_Inputs): def __init__(self, op: Operator): super().__init__(min_over_time_by_entity._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( min_over_time_by_entity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._abs_value = Input(min_over_time_by_entity._spec().input_pin(3), 3, op, -1) + self._abs_value: Input[bool] = Input( + min_over_time_by_entity._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._abs_value) - self._compute_amplitude = Input( + self._compute_amplitude: Input[bool] = Input( min_over_time_by_entity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._compute_amplitude) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -208,7 +214,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def abs_value(self) -> Input: + def abs_value(self) -> Input[bool]: r"""Allows to connect abs_value input to the operator. Should use absolute value. @@ -229,7 +235,7 @@ def abs_value(self) -> Input: return self._abs_value @property - def compute_amplitude(self) -> Input: + def compute_amplitude(self) -> Input[bool]: r"""Allows to connect compute_amplitude input to the operator. Do calculate amplitude. @@ -264,13 +270,13 @@ class OutputsMinOverTimeByEntity(_Outputs): def __init__(self, op: Operator): super().__init__(min_over_time_by_entity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( min_over_time_by_entity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/phase_of_max.py b/src/ansys/dpf/core/operators/min_max/phase_of_max.py index 1d528d86772..95d0a6f57ae 100644 --- a/src/ansys/dpf/core/operators/min_max/phase_of_max.py +++ b/src/ansys/dpf/core/operators/min_max/phase_of_max.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class phase_of_max(Operator): r"""Evaluates phase of maximum. 
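# What the pin annotations declared in the following hunks surface to tooling,
# assuming mypy or a comparable checker (reveal_type is understood by checkers
# only, hence left commented so the snippet stays runnable):
from ansys.dpf.core import operators as ops

op = ops.min_max.phase_of_max()
# reveal_type(op.inputs.abs_value)         # checker reports Input[bool]
# reveal_type(op.inputs.phase_increment)   # checker reports Input[float]
# reveal_type(op.outputs.field)            # checker reports Output[Field]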
@@ -192,17 +196,25 @@ class InputsPhaseOfMax(_Inputs): def __init__(self, op: Operator): super().__init__(phase_of_max._spec().inputs, op) - self._real_field = Input(phase_of_max._spec().input_pin(0), 0, op, -1) + self._real_field: Input[Field] = Input( + phase_of_max._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._real_field) - self._imaginary_field = Input(phase_of_max._spec().input_pin(1), 1, op, -1) + self._imaginary_field: Input[Field] = Input( + phase_of_max._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._imaginary_field) - self._abs_value = Input(phase_of_max._spec().input_pin(2), 2, op, -1) + self._abs_value: Input[bool] = Input( + phase_of_max._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._abs_value) - self._phase_increment = Input(phase_of_max._spec().input_pin(3), 3, op, -1) + self._phase_increment: Input[float] = Input( + phase_of_max._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._phase_increment) @property - def real_field(self) -> Input: + def real_field(self) -> Input[Field]: r"""Allows to connect real_field input to the operator. Returns @@ -221,7 +233,7 @@ def real_field(self) -> Input: return self._real_field @property - def imaginary_field(self) -> Input: + def imaginary_field(self) -> Input[Field]: r"""Allows to connect imaginary_field input to the operator. Returns @@ -240,7 +252,7 @@ def imaginary_field(self) -> Input: return self._imaginary_field @property - def abs_value(self) -> Input: + def abs_value(self) -> Input[bool]: r"""Allows to connect abs_value input to the operator. Should use absolute value. @@ -261,7 +273,7 @@ def abs_value(self) -> Input: return self._abs_value @property - def phase_increment(self) -> Input: + def phase_increment(self) -> Input[float]: r"""Allows to connect phase_increment input to the operator. Phase increment. @@ -296,11 +308,11 @@ class OutputsPhaseOfMax(_Outputs): def __init__(self, op: Operator): super().__init__(phase_of_max._spec().outputs, op) - self._field = Output(phase_of_max._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(phase_of_max._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py b/src/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py index 3efbf84b081..941ca430f11 100644 --- a/src/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py +++ b/src/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class time_of_max_by_entity(Operator): r"""Evaluates time/frequency of maximum. 
@@ -177,19 +181,21 @@ class InputsTimeOfMaxByEntity(_Inputs): def __init__(self, op: Operator): super().__init__(time_of_max_by_entity._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( time_of_max_by_entity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._abs_value = Input(time_of_max_by_entity._spec().input_pin(3), 3, op, -1) + self._abs_value: Input[bool] = Input( + time_of_max_by_entity._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._abs_value) - self._compute_amplitude = Input( + self._compute_amplitude: Input[bool] = Input( time_of_max_by_entity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._compute_amplitude) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -208,7 +214,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def abs_value(self) -> Input: + def abs_value(self) -> Input[bool]: r"""Allows to connect abs_value input to the operator. Should use absolute value. @@ -229,7 +235,7 @@ def abs_value(self) -> Input: return self._abs_value @property - def compute_amplitude(self) -> Input: + def compute_amplitude(self) -> Input[bool]: r"""Allows to connect compute_amplitude input to the operator. Do calculate amplitude. @@ -264,13 +270,13 @@ class OutputsTimeOfMaxByEntity(_Outputs): def __init__(self, op: Operator): super().__init__(time_of_max_by_entity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( time_of_max_by_entity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py b/src/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py index cf549b386f6..4c91f38daa8 100644 --- a/src/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py +++ b/src/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class time_of_min_by_entity(Operator): r"""Evaluates time/frequency of minimum. 
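# The time_of_max_by_entity / time_of_min_by_entity pair shares the same typed
# layout: Input[FieldsContainer] plus two Input[bool] toggles, and an
# Output[FieldsContainer] carrying the time/frequency per entity. Brief sketch,
# with upstream_fc as a placeholder transient-result FieldsContainer:
from ansys.dpf.core import operators as ops

t_of_max = ops.min_max.time_of_max_by_entity()
# t_of_max.inputs.fields_container.connect(upstream_fc)   # Input[FieldsContainer]
t_of_max.inputs.abs_value.connect(True)                    # Input[bool]
t_of_max.inputs.compute_amplitude.connect(False)           # Input[bool]
# when_max = t_of_max.outputs.fields_container()           # Output[FieldsContainer]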
@@ -177,19 +181,21 @@ class InputsTimeOfMinByEntity(_Inputs): def __init__(self, op: Operator): super().__init__(time_of_min_by_entity._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( time_of_min_by_entity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._abs_value = Input(time_of_min_by_entity._spec().input_pin(3), 3, op, -1) + self._abs_value: Input[bool] = Input( + time_of_min_by_entity._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._abs_value) - self._compute_amplitude = Input( + self._compute_amplitude: Input[bool] = Input( time_of_min_by_entity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._compute_amplitude) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -208,7 +214,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def abs_value(self) -> Input: + def abs_value(self) -> Input[bool]: r"""Allows to connect abs_value input to the operator. Should use absolute value. @@ -229,7 +235,7 @@ def abs_value(self) -> Input: return self._abs_value @property - def compute_amplitude(self) -> Input: + def compute_amplitude(self) -> Input[bool]: r"""Allows to connect compute_amplitude input to the operator. Do calculate amplitude. @@ -264,13 +270,13 @@ class OutputsTimeOfMinByEntity(_Outputs): def __init__(self, op: Operator): super().__init__(time_of_min_by_entity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( time_of_min_by_entity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/operator.mustache b/src/ansys/dpf/core/operators/operator.mustache index 6b12b2d91a3..6a31ff2ee1c 100644 --- a/src/ansys/dpf/core/operators/operator.mustache +++ b/src/ansys/dpf/core/operators/operator.mustache @@ -5,6 +5,9 @@ Autogenerated DPF operator classes. 
""" from __future__ import annotations +{{#non_empty_annotation_import_list}} +from typing import TYPE_CHECKING +{{/non_empty_annotation_import_list}} from warnings import warn {{#has_internal_name_alias}} @@ -25,6 +28,12 @@ from ansys.dpf.core.operators.specification import PinSpecification, Specificati from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +{{#non_empty_annotation_import_list}} +if TYPE_CHECKING: +{{/non_empty_annotation_import_list}} +{{#annotation_import_list}} + from {{definition_location}} import {{class_name}} +{{/annotation_import_list}} class {{class_name}}(Operator): r"""{{{docstring}}} @@ -223,13 +232,13 @@ class Inputs{{capital_class_name}}(_Inputs): def __init__(self, op: Operator): super().__init__({{class_name}}._spec().inputs, op) {{#input_pins}} - self._{{name}} = Input({{class_name}}._spec().input_pin({{id}}), {{id}}, op, {{ellipsis}}) + self._{{name}}: Input{{#type_list_for_annotation}}[{{type_list_for_annotation}}]{{/type_list_for_annotation}} = Input({{class_name}}._spec().input_pin({{id}}), {{id}}, op, {{ellipsis}}) self._inputs.append(self._{{name}}) {{/input_pins}} {{#input_pins}} @property - def {{name}}(self) -> Input: + def {{name}}(self) -> Input{{#type_list_for_annotation}}[{{type_list_for_annotation}}]{{/type_list_for_annotation}}: r"""Allows to connect {{name}} input to the operator. {{#document_pin_docstring}} @@ -289,7 +298,7 @@ class Outputs{{capital_class_name}}(_Outputs): {{/printable_type_names}} {{/multiple_types}} {{^multiple_types}} - self._{{name}} = Output({{class_name}}._spec().output_pin({{id}}), {{id}}, op) + self._{{name}}: Output{{#type_list_for_annotation}}[{{type_list_for_annotation}}]{{/type_list_for_annotation}} = Output({{class_name}}._spec().output_pin({{id}}), {{id}}, op) self._outputs.append(self._{{name}}) {{/multiple_types}} {{/output_pins}} @@ -297,7 +306,7 @@ class Outputs{{capital_class_name}}(_Outputs): {{^multiple_types}} @property - def {{name}}(self) -> Output: + def {{name}}(self) -> Output{{#type_list_for_annotation}}[{{type_list_for_annotation}}]{{/type_list_for_annotation}}: r"""Allows to get {{name}} output of the operator {{#document_pin_docstring}} diff --git a/src/ansys/dpf/core/operators/result/acceleration.py b/src/ansys/dpf/core/operators/result/acceleration.py index 238984797f4..4ff5b2426f2 100644 --- a/src/ansys/dpf/core/operators/result/acceleration.py +++ b/src/ansys/dpf/core/operators/result/acceleration.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class acceleration(Operator): r"""Read/compute nodal accelerations by calling the readers defined by the @@ -315,35 +326,51 @@ class InputsAcceleration(_Inputs): def __init__(self, op: Operator): super().__init__(acceleration._spec().inputs, op) - self._time_scoping = Input(acceleration._spec().input_pin(0), 0, op, 
-1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + acceleration._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(acceleration._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + acceleration._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(acceleration._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + acceleration._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(acceleration._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + acceleration._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(acceleration._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + acceleration._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( acceleration._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(acceleration._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + acceleration._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(acceleration._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + acceleration._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( acceleration._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input(acceleration._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( + acceleration._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(acceleration._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input(acceleration._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -364,7 +391,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. 
To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -385,7 +412,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -406,7 +433,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -427,7 +454,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -448,7 +475,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -469,7 +496,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -490,7 +517,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -511,7 +538,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -532,7 +559,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -553,7 +580,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. 
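# End-to-end sketch for the acceleration reader using the pin types above.
# The .rst path is a placeholder for any supported result file; connecting the
# int -1 to time_scoping requests all time/freq sets, as the pin doc above states.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

ds = dpf.DataSources(r"path/to/results.rst")   # placeholder path
op = ops.result.acceleration()
op.inputs.data_sources.connect(ds)             # Input[DataSources]
op.inputs.time_scoping.connect(-1)             # Input[Scoping | int | float | Field]
op.inputs.bool_rotate_to_global.connect(True)  # Input[bool]
# acc = op.outputs.fields_container()          # Output[FieldsContainer], needs a real file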
@@ -588,11 +615,13 @@ class OutputsAcceleration(_Outputs): def __init__(self, op: Operator): super().__init__(acceleration._spec().outputs, op) - self._fields_container = Output(acceleration._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + acceleration._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/acceleration_X.py b/src/ansys/dpf/core/operators/result/acceleration_X.py index cff3ad4043d..7d4ccdf5828 100644 --- a/src/ansys/dpf/core/operators/result/acceleration_X.py +++ b/src/ansys/dpf/core/operators/result/acceleration_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class acceleration_X(Operator): r"""Read/compute nodal accelerations X component of the vector (1st @@ -267,27 +278,41 @@ class InputsAccelerationX(_Inputs): def __init__(self, op: Operator): super().__init__(acceleration_X._spec().inputs, op) - self._time_scoping = Input(acceleration_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + acceleration_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(acceleration_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + acceleration_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(acceleration_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + acceleration_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(acceleration_X._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + acceleration_X._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(acceleration_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + acceleration_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( acceleration_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(acceleration_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + acceleration_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(acceleration_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + acceleration_X._spec().input_pin(14), 
14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -308,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -329,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -350,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -371,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -392,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -413,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -434,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -469,11 +494,13 @@ class OutputsAccelerationX(_Outputs): def __init__(self, op: Operator): super().__init__(acceleration_X._spec().outputs, op) - self._fields_container = Output(acceleration_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + acceleration_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/acceleration_Y.py b/src/ansys/dpf/core/operators/result/acceleration_Y.py index a3beac4cf17..e03b99a78f8 100644 --- a/src/ansys/dpf/core/operators/result/acceleration_Y.py +++ b/src/ansys/dpf/core/operators/result/acceleration_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class acceleration_Y(Operator): r"""Read/compute nodal accelerations Y component of the vector (2nd @@ -267,27 +278,41 @@ class InputsAccelerationY(_Inputs): def __init__(self, op: Operator): super().__init__(acceleration_Y._spec().inputs, op) - self._time_scoping = Input(acceleration_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + acceleration_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(acceleration_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + acceleration_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(acceleration_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + acceleration_Y._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(acceleration_Y._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + acceleration_Y._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(acceleration_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + acceleration_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( acceleration_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(acceleration_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + acceleration_Y._spec().input_pin(7), 7, op, -1 + ) 
self._inputs.append(self._mesh) - self._read_cyclic = Input(acceleration_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + acceleration_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -308,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -329,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -350,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -371,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -392,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -413,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -434,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -469,11 +494,13 @@ class OutputsAccelerationY(_Outputs): def __init__(self, op: Operator): super().__init__(acceleration_Y._spec().outputs, op) - self._fields_container = Output(acceleration_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + acceleration_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/acceleration_Z.py b/src/ansys/dpf/core/operators/result/acceleration_Z.py index 051d941f186..39217da6071 100644 --- a/src/ansys/dpf/core/operators/result/acceleration_Z.py +++ b/src/ansys/dpf/core/operators/result/acceleration_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class acceleration_Z(Operator): r"""Read/compute nodal accelerations Z component of the vector (3rd @@ -267,27 +278,41 @@ class InputsAccelerationZ(_Inputs): def __init__(self, op: Operator): super().__init__(acceleration_Z._spec().inputs, op) - self._time_scoping = Input(acceleration_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + acceleration_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(acceleration_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + acceleration_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(acceleration_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + acceleration_Z._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(acceleration_Z._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + acceleration_Z._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(acceleration_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + acceleration_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( acceleration_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(acceleration_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + acceleration_Z._spec().input_pin(7), 7, op, -1 + ) 
self._inputs.append(self._mesh) - self._read_cyclic = Input(acceleration_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + acceleration_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -308,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -329,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -350,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -371,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -392,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -413,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -434,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -469,11 +494,13 @@ class OutputsAccelerationZ(_Outputs): def __init__(self, op: Operator): super().__init__(acceleration_Z._spec().outputs, op) - self._fields_container = Output(acceleration_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + acceleration_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py b/src/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py index f77f11f5395..530190ed356 100644 --- a/src/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py +++ b/src/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class accu_eqv_creep_strain(Operator): r"""Read/compute element nodal accumulated equivalent creep strain by @@ -555,55 +566,57 @@ class InputsAccuEqvCreepStrain(_Inputs): def __init__(self, op: Operator): super().__init__(accu_eqv_creep_strain._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( accu_eqv_creep_strain._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( accu_eqv_creep_strain._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( accu_eqv_creep_strain._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( accu_eqv_creep_strain._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( accu_eqv_creep_strain._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( accu_eqv_creep_strain._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(accu_eqv_creep_strain._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + accu_eqv_creep_strain._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( accu_eqv_creep_strain._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( accu_eqv_creep_strain._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( accu_eqv_creep_strain._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( accu_eqv_creep_strain._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( accu_eqv_creep_strain._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -624,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -645,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -666,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -687,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -708,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -729,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -750,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -771,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -792,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -813,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -834,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -869,13 +882,13 @@ class OutputsAccuEqvCreepStrain(_Outputs): def __init__(self, op: Operator): super().__init__(accu_eqv_creep_strain._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( accu_eqv_creep_strain._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py b/src/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py index cb1736d73c6..55bf704f241 100644 --- a/src/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py +++ b/src/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class accu_eqv_plastic_strain(Operator): r"""Read/compute element nodal accumulated equivalent plastic strain by @@ -555,55 +566,57 @@ class InputsAccuEqvPlasticStrain(_Inputs): def __init__(self, op: Operator): super().__init__(accu_eqv_plastic_strain._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( accu_eqv_plastic_strain._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( accu_eqv_plastic_strain._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( accu_eqv_plastic_strain._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( accu_eqv_plastic_strain._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( accu_eqv_plastic_strain._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( accu_eqv_plastic_strain._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(accu_eqv_plastic_strain._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + accu_eqv_plastic_strain._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( accu_eqv_plastic_strain._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( 
accu_eqv_plastic_strain._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( accu_eqv_plastic_strain._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( accu_eqv_plastic_strain._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( accu_eqv_plastic_strain._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -624,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -645,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -666,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -687,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -708,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. 
@@ -729,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -750,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -771,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -792,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -813,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -834,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -869,13 +882,13 @@ class OutputsAccuEqvPlasticStrain(_Outputs): def __init__(self, op: Operator): super().__init__(accu_eqv_plastic_strain._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( accu_eqv_plastic_strain._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/add_rigid_body_motion.py b/src/ansys/dpf/core/operators/result/add_rigid_body_motion.py index c6c8a34daf2..4fd74379f60 100644 --- a/src/ansys/dpf/core/operators/result/add_rigid_body_motion.py +++ b/src/ansys/dpf/core/operators/result/add_rigid_body_motion.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + class add_rigid_body_motion(Operator): r"""Adds a given rigid translation, center and rotation from a displacement @@ -210,27 +215,29 @@ class InputsAddRigidBodyMotion(_Inputs): def __init__(self, op: Operator): super().__init__(add_rigid_body_motion._spec().inputs, op) - self._displacement_field = Input( + self._displacement_field: Input[Field] = Input( add_rigid_body_motion._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._displacement_field) - self._translation_field = Input( + self._translation_field: Input[Field] = Input( add_rigid_body_motion._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._translation_field) - self._rotation_field = Input( + self._rotation_field: Input[Field] = Input( add_rigid_body_motion._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._rotation_field) - self._center_field = Input( + self._center_field: Input[Field] = Input( add_rigid_body_motion._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._center_field) - self._mesh = Input(add_rigid_body_motion._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + add_rigid_body_motion._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def displacement_field(self) -> Input: + def displacement_field(self) -> Input[Field]: r"""Allows to connect displacement_field input to the operator. Returns @@ -249,7 +256,7 @@ def displacement_field(self) -> Input: return self._displacement_field @property - def translation_field(self) -> Input: + def translation_field(self) -> Input[Field]: r"""Allows to connect translation_field input to the operator. Returns @@ -268,7 +275,7 @@ def translation_field(self) -> Input: return self._translation_field @property - def rotation_field(self) -> Input: + def rotation_field(self) -> Input[Field]: r"""Allows to connect rotation_field input to the operator. Returns @@ -287,7 +294,7 @@ def rotation_field(self) -> Input: return self._rotation_field @property - def center_field(self) -> Input: + def center_field(self) -> Input[Field]: r"""Allows to connect center_field input to the operator. 
Returns @@ -306,7 +313,7 @@ def center_field(self) -> Input: return self._center_field @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. default is the mesh in the support @@ -341,11 +348,13 @@ class OutputsAddRigidBodyMotion(_Outputs): def __init__(self, op: Operator): super().__init__(add_rigid_body_motion._spec().outputs, op) - self._field = Output(add_rigid_body_motion._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + add_rigid_body_motion._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py b/src/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py index 26fbb3e2f58..79f486a6eb7 100644 --- a/src/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py +++ b/src/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class add_rigid_body_motion_fc(Operator): r"""Adds a given rigid translation, center and rotation from a displacement @@ -210,27 +216,29 @@ class InputsAddRigidBodyMotionFc(_Inputs): def __init__(self, op: Operator): super().__init__(add_rigid_body_motion_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( add_rigid_body_motion_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._translation_field = Input( + self._translation_field: Input[Field] = Input( add_rigid_body_motion_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._translation_field) - self._rotation_field = Input( + self._rotation_field: Input[Field] = Input( add_rigid_body_motion_fc._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._rotation_field) - self._center_field = Input( + self._center_field: Input[Field] = Input( add_rigid_body_motion_fc._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._center_field) - self._mesh = Input(add_rigid_body_motion_fc._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + add_rigid_body_motion_fc._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -249,7 +257,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def translation_field(self) -> Input: + def translation_field(self) -> Input[Field]: r"""Allows to connect translation_field input to the operator. Returns @@ -268,7 +276,7 @@ def translation_field(self) -> Input: return self._translation_field @property - def rotation_field(self) -> Input: + def rotation_field(self) -> Input[Field]: r"""Allows to connect rotation_field input to the operator. 
Returns @@ -287,7 +295,7 @@ def rotation_field(self) -> Input: return self._rotation_field @property - def center_field(self) -> Input: + def center_field(self) -> Input[Field]: r"""Allows to connect center_field input to the operator. Returns @@ -306,7 +314,7 @@ def center_field(self) -> Input: return self._center_field @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. default is the mesh in the support @@ -341,13 +349,13 @@ class OutputsAddRigidBodyMotionFc(_Outputs): def __init__(self, op: Operator): super().__init__(add_rigid_body_motion_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( add_rigid_body_motion_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/artificial_hourglass_energy.py b/src/ansys/dpf/core/operators/result/artificial_hourglass_energy.py index f6a1ba1eea2..dc8ea24ac64 100644 --- a/src/ansys/dpf/core/operators/result/artificial_hourglass_energy.py +++ b/src/ansys/dpf/core/operators/result/artificial_hourglass_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class artificial_hourglass_energy(Operator): r"""Read/compute artificial hourglass energy by calling the readers defined @@ -251,35 +262,37 @@ class InputsArtificialHourglassEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(artificial_hourglass_energy._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( artificial_hourglass_energy._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( artificial_hourglass_energy._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( artificial_hourglass_energy._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( artificial_hourglass_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( artificial_hourglass_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( artificial_hourglass_energy._spec().input_pin(5), 5, op, -1 ) 
self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(artificial_hourglass_energy._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + artificial_hourglass_energy._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsArtificialHourglassEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(artificial_hourglass_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( artificial_hourglass_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/beam_axial_force.py b/src/ansys/dpf/core/operators/result/beam_axial_force.py index dd7f5eb44b2..930d84f78c1 100644 --- a/src/ansys/dpf/core/operators/result/beam_axial_force.py +++ b/src/ansys/dpf/core/operators/result/beam_axial_force.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class beam_axial_force(Operator): r"""Read Beam X Axial Force by calling the readers defined by the @@ -223,21 +233,29 @@ class InputsBeamAxialForce(_Inputs): def __init__(self, op: Operator): super().__init__(beam_axial_force._spec().inputs, op) - self._time_scoping = Input(beam_axial_force._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + beam_axial_force._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(beam_axial_force._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + beam_axial_force._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( beam_axial_force._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(beam_axial_force._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + beam_axial_force._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._unit_system = Input(beam_axial_force._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + beam_axial_force._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". 
Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -258,7 +276,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. elements scoping required in output. @@ -279,7 +297,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -300,7 +318,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -321,7 +339,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -356,11 +374,13 @@ class OutputsBeamAxialForce(_Outputs): def __init__(self, op: Operator): super().__init__(beam_axial_force._spec().outputs, op) - self._fields_container = Output(beam_axial_force._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + beam_axial_force._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/beam_axial_plastic_strain.py b/src/ansys/dpf/core/operators/result/beam_axial_plastic_strain.py index f9da51d16ac..153c60d474d 100644 --- a/src/ansys/dpf/core/operators/result/beam_axial_plastic_strain.py +++ b/src/ansys/dpf/core/operators/result/beam_axial_plastic_strain.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class beam_axial_plastic_strain(Operator): r"""Read Beam Axial Plastic strain (LSDyna) by calling the readers defined @@ -239,33 +249,33 @@ class InputsBeamAxialPlasticStrain(_Inputs): def __init__(self, op: Operator): super().__init__(beam_axial_plastic_strain._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( beam_axial_plastic_strain._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - 
self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( beam_axial_plastic_strain._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( beam_axial_plastic_strain._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( beam_axial_plastic_strain._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._integration_point = Input( + self._integration_point: Input[int] = Input( beam_axial_plastic_strain._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._integration_point) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( beam_axial_plastic_strain._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -286,7 +296,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. elements scoping required in output. @@ -307,7 +317,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -328,7 +338,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -349,7 +359,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def integration_point(self) -> Input: + def integration_point(self) -> Input[int]: r"""Allows to connect integration_point input to the operator. integration point where the result will be read from. Default value: 0 (first integration point). @@ -370,7 +380,7 @@ def integration_point(self) -> Input: return self._integration_point @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -405,13 +415,13 @@ class OutputsBeamAxialPlasticStrain(_Outputs): def __init__(self, op: Operator): super().__init__(beam_axial_plastic_strain._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( beam_axial_plastic_strain._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/beam_axial_stress.py b/src/ansys/dpf/core/operators/result/beam_axial_stress.py index bb74034ecd7..777e2e3aa1e 100644 --- a/src/ansys/dpf/core/operators/result/beam_axial_stress.py +++ b/src/ansys/dpf/core/operators/result/beam_axial_stress.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class beam_axial_stress(Operator): r"""Read Beam X Axial Stress by calling the readers defined by the @@ -239,25 +249,33 @@ class InputsBeamAxialStress(_Inputs): def __init__(self, op: Operator): super().__init__(beam_axial_stress._spec().inputs, op) - self._time_scoping = Input(beam_axial_stress._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + beam_axial_stress._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(beam_axial_stress._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + beam_axial_stress._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( beam_axial_stress._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(beam_axial_stress._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + beam_axial_stress._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._integration_point = Input( + self._integration_point: Input[int] = Input( beam_axial_stress._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._integration_point) - self._unit_system = Input(beam_axial_stress._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + beam_axial_stress._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. 
To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -278,7 +296,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. elements scoping required in output. @@ -299,7 +317,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -320,7 +338,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -341,7 +359,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def integration_point(self) -> Input: + def integration_point(self) -> Input[int]: r"""Allows to connect integration_point input to the operator. integration point where the result will be read from. Default value: 0 (first integration point). @@ -362,7 +380,7 @@ def integration_point(self) -> Input: return self._integration_point @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -397,11 +415,13 @@ class OutputsBeamAxialStress(_Outputs): def __init__(self, op: Operator): super().__init__(beam_axial_stress._spec().outputs, op) - self._fields_container = Output(beam_axial_stress._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + beam_axial_stress._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/beam_axial_total_strain.py b/src/ansys/dpf/core/operators/result/beam_axial_total_strain.py index c8c81aae592..effd438e248 100644 --- a/src/ansys/dpf/core/operators/result/beam_axial_total_strain.py +++ b/src/ansys/dpf/core/operators/result/beam_axial_total_strain.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class beam_axial_total_strain(Operator): r"""Read Beam X Axial Total strain by calling the readers defined by the @@ -239,33 +249,33 @@ class InputsBeamAxialTotalStrain(_Inputs): def __init__(self, op: Operator): super().__init__(beam_axial_total_strain._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( beam_axial_total_strain._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( beam_axial_total_strain._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( beam_axial_total_strain._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( beam_axial_total_strain._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._integration_point = Input( + self._integration_point: Input[int] = Input( beam_axial_total_strain._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._integration_point) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( beam_axial_total_strain._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". 
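Illustrative sketch of the time_scoping forms documented above; the set id and time value are placeholders, and any one of the accepted types may be connected:

from ansys.dpf import core as dpf

op = dpf.operators.result.beam_axial_total_strain()
op.inputs.time_scoping.connect(-1)    # all time/freq sets, as documented above
# or a single (placeholder) time/freq set id:
op.inputs.time_scoping.connect(2)
# or a (placeholder) time/freq value, interpolated if it is not stored in the result file:
op.inputs.time_scoping.connect(0.05)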
Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -286,7 +296,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. elements scoping required in output. @@ -307,7 +317,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -328,7 +338,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -349,7 +359,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def integration_point(self) -> Input: + def integration_point(self) -> Input[int]: r"""Allows to connect integration_point input to the operator. integration point where the result will be read from. Default value: 0 (first integration point). @@ -370,7 +380,7 @@ def integration_point(self) -> Input: return self._integration_point @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -405,13 +415,13 @@ class OutputsBeamAxialTotalStrain(_Outputs): def __init__(self, op: Operator): super().__init__(beam_axial_total_strain._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( beam_axial_total_strain._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/beam_rs_shear_stress.py b/src/ansys/dpf/core/operators/result/beam_rs_shear_stress.py index a0d1b84bfb3..9c6919d90a8 100644 --- a/src/ansys/dpf/core/operators/result/beam_rs_shear_stress.py +++ b/src/ansys/dpf/core/operators/result/beam_rs_shear_stress.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class beam_rs_shear_stress(Operator): r"""Read Beam RS Shear Stress (LSDyna) by calling the readers defined by the @@ -239,27 +249,33 @@ class InputsBeamRsShearStress(_Inputs): def 
__init__(self, op: Operator): super().__init__(beam_rs_shear_stress._spec().inputs, op) - self._time_scoping = Input(beam_rs_shear_stress._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + beam_rs_shear_stress._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(beam_rs_shear_stress._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + beam_rs_shear_stress._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( beam_rs_shear_stress._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(beam_rs_shear_stress._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + beam_rs_shear_stress._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._integration_point = Input( + self._integration_point: Input[int] = Input( beam_rs_shear_stress._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._integration_point) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( beam_rs_shear_stress._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -280,7 +296,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. elements scoping required in output. @@ -301,7 +317,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -322,7 +338,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -343,7 +359,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def integration_point(self) -> Input: + def integration_point(self) -> Input[int]: r"""Allows to connect integration_point input to the operator. integration point where the result will be read from. Default value: 0 (first integration point). 
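Illustrative sketch for the integration_point pin described above (the value 3 is a placeholder):

from ansys.dpf import core as dpf

op = dpf.operators.result.beam_rs_shear_stress()
op.inputs.integration_point.connect(3)  # defaults to 0, the first integration point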
@@ -364,7 +380,7 @@ def integration_point(self) -> Input: return self._integration_point @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -399,13 +415,13 @@ class OutputsBeamRsShearStress(_Outputs): def __init__(self, op: Operator): super().__init__(beam_rs_shear_stress._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( beam_rs_shear_stress._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/beam_s_bending_moment.py b/src/ansys/dpf/core/operators/result/beam_s_bending_moment.py index 8d519755afb..90e22f3d7b8 100644 --- a/src/ansys/dpf/core/operators/result/beam_s_bending_moment.py +++ b/src/ansys/dpf/core/operators/result/beam_s_bending_moment.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class beam_s_bending_moment(Operator): r"""Read Beam S/Y Bending Moment by calling the readers defined by the @@ -223,29 +233,29 @@ class InputsBeamSBendingMoment(_Inputs): def __init__(self, op: Operator): super().__init__(beam_s_bending_moment._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( beam_s_bending_moment._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( beam_s_bending_moment._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( beam_s_bending_moment._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( beam_s_bending_moment._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( beam_s_bending_moment._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". 
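Illustrative sketch of the three unit_system forms accepted by these operators; the id, the base-unit string, and the predefined dpf.unit_systems constant are placeholders or assumptions, not values taken from the patch:

from ansys.dpf import core as dpf

op = dpf.operators.result.beam_s_bending_moment()
op.inputs.unit_system.connect(6)                 # (LSDyna) unit-system id, placeholder value
op.inputs.unit_system.connect("m;kg;N;s;V;A;K")  # semicolon-separated base units, placeholder string
op.inputs.unit_system.connect(dpf.unit_systems.solver_mks)  # a UnitSystem instance, if predefined systems are available in your version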
Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -266,7 +276,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. elements scoping required in output. @@ -287,7 +297,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -308,7 +318,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -329,7 +339,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -364,13 +374,13 @@ class OutputsBeamSBendingMoment(_Outputs): def __init__(self, op: Operator): super().__init__(beam_s_bending_moment._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( beam_s_bending_moment._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/beam_s_shear_force.py b/src/ansys/dpf/core/operators/result/beam_s_shear_force.py index c3dbc7a10d0..b44d0d416d3 100644 --- a/src/ansys/dpf/core/operators/result/beam_s_shear_force.py +++ b/src/ansys/dpf/core/operators/result/beam_s_shear_force.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class beam_s_shear_force(Operator): r"""Read Beam S/Y Shear Force by calling the readers defined by the @@ -223,21 +233,29 @@ class InputsBeamSShearForce(_Inputs): def __init__(self, op: Operator): super().__init__(beam_s_shear_force._spec().inputs, op) - self._time_scoping = Input(beam_s_shear_force._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + beam_s_shear_force._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = 
Input(beam_s_shear_force._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + beam_s_shear_force._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( beam_s_shear_force._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(beam_s_shear_force._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + beam_s_shear_force._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._unit_system = Input(beam_s_shear_force._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + beam_s_shear_force._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -258,7 +276,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. elements scoping required in output. @@ -279,7 +297,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -300,7 +318,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -321,7 +339,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -356,11 +374,13 @@ class OutputsBeamSShearForce(_Outputs): def __init__(self, op: Operator): super().__init__(beam_s_shear_force._spec().outputs, op) - self._fields_container = Output(beam_s_shear_force._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + beam_s_shear_force._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/beam_t_bending_moment.py b/src/ansys/dpf/core/operators/result/beam_t_bending_moment.py index d9d48370c7e..9f395071d71 100644 --- a/src/ansys/dpf/core/operators/result/beam_t_bending_moment.py +++ b/src/ansys/dpf/core/operators/result/beam_t_bending_moment.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class beam_t_bending_moment(Operator): r"""Read Beam T/Z Bending Moment by calling the readers defined by the @@ -223,29 +233,29 @@ class InputsBeamTBendingMoment(_Inputs): def __init__(self, op: Operator): super().__init__(beam_t_bending_moment._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( beam_t_bending_moment._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( beam_t_bending_moment._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( beam_t_bending_moment._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( beam_t_bending_moment._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( beam_t_bending_moment._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. 
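Illustrative sketch for the mesh_scoping pin (elements scoping) described above; the element ids are placeholders:

from ansys.dpf import core as dpf

op = dpf.operators.result.beam_t_bending_moment()
elem_scoping = dpf.Scoping(ids=[10, 11, 12], location=dpf.locations.elemental)
op.inputs.mesh_scoping.connect(elem_scoping)  # output fields are scoped on these element ids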
To get all data for all time/freq sets, connect an int with value -1. @@ -266,7 +276,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. elements scoping required in output. @@ -287,7 +297,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -308,7 +318,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -329,7 +339,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -364,13 +374,13 @@ class OutputsBeamTBendingMoment(_Outputs): def __init__(self, op: Operator): super().__init__(beam_t_bending_moment._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( beam_t_bending_moment._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/beam_t_shear_force.py b/src/ansys/dpf/core/operators/result/beam_t_shear_force.py index ae4b738c24a..2ade0498d90 100644 --- a/src/ansys/dpf/core/operators/result/beam_t_shear_force.py +++ b/src/ansys/dpf/core/operators/result/beam_t_shear_force.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class beam_t_shear_force(Operator): r"""Read Beam T/Z Shear Force by calling the readers defined by the @@ -223,21 +233,29 @@ class InputsBeamTShearForce(_Inputs): def __init__(self, op: Operator): super().__init__(beam_t_shear_force._spec().inputs, op) - self._time_scoping = Input(beam_t_shear_force._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + beam_t_shear_force._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(beam_t_shear_force._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + beam_t_shear_force._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - 
self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( beam_t_shear_force._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(beam_t_shear_force._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + beam_t_shear_force._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._unit_system = Input(beam_t_shear_force._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + beam_t_shear_force._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -258,7 +276,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. elements scoping required in output. @@ -279,7 +297,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -300,7 +318,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -321,7 +339,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -356,11 +374,13 @@ class OutputsBeamTShearForce(_Outputs): def __init__(self, op: Operator): super().__init__(beam_t_shear_force._spec().outputs, op) - self._fields_container = Output(beam_t_shear_force._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + beam_t_shear_force._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/beam_torsional_moment.py b/src/ansys/dpf/core/operators/result/beam_torsional_moment.py index 1287918941a..5238c6cb690 100644 --- a/src/ansys/dpf/core/operators/result/beam_torsional_moment.py +++ b/src/ansys/dpf/core/operators/result/beam_torsional_moment.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class beam_torsional_moment(Operator): r"""Read Beam Torsional Moment by calling the readers defined by the @@ -223,29 +233,29 @@ class InputsBeamTorsionalMoment(_Inputs): def __init__(self, op: Operator): super().__init__(beam_torsional_moment._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( beam_torsional_moment._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( beam_torsional_moment._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( beam_torsional_moment._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( beam_torsional_moment._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( beam_torsional_moment._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. 
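Illustrative end-to-end sketch for one of these beam result operators; the result file path is a placeholder:

from ansys.dpf import core as dpf

ds = dpf.DataSources(r"path/to/d3plot")  # placeholder path to an LS-DYNA result file
op = dpf.operators.result.beam_torsional_moment(data_sources=ds)
fc = op.outputs.fields_container()       # evaluates the operator and returns a FieldsContainer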
To get all data for all time/freq sets, connect an int with value -1. @@ -266,7 +276,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. elements scoping required in output. @@ -287,7 +297,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -308,7 +318,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -329,7 +339,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -364,13 +374,13 @@ class OutputsBeamTorsionalMoment(_Outputs): def __init__(self, op: Operator): super().__init__(beam_torsional_moment._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( beam_torsional_moment._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/beam_tr_shear_stress.py b/src/ansys/dpf/core/operators/result/beam_tr_shear_stress.py index 7d93560aad7..6f8dfdbcd36 100644 --- a/src/ansys/dpf/core/operators/result/beam_tr_shear_stress.py +++ b/src/ansys/dpf/core/operators/result/beam_tr_shear_stress.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class beam_tr_shear_stress(Operator): r"""Read Beam TR Shear Stress (LSDyna) by calling the readers defined by the @@ -239,27 +249,33 @@ class InputsBeamTrShearStress(_Inputs): def __init__(self, op: Operator): super().__init__(beam_tr_shear_stress._spec().inputs, op) - self._time_scoping = Input(beam_tr_shear_stress._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + beam_tr_shear_stress._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(beam_tr_shear_stress._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + beam_tr_shear_stress._spec().input_pin(1), 1, op, -1 + ) 
self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( beam_tr_shear_stress._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(beam_tr_shear_stress._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + beam_tr_shear_stress._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._integration_point = Input( + self._integration_point: Input[int] = Input( beam_tr_shear_stress._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._integration_point) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( beam_tr_shear_stress._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -280,7 +296,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. elements scoping required in output. @@ -301,7 +317,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -322,7 +338,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -343,7 +359,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def integration_point(self) -> Input: + def integration_point(self) -> Input[int]: r"""Allows to connect integration_point input to the operator. integration point where the result will be read from. Default value: 0 (first integration point). @@ -364,7 +380,7 @@ def integration_point(self) -> Input: return self._integration_point @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -399,13 +415,13 @@ class OutputsBeamTrShearStress(_Outputs): def __init__(self, op: Operator): super().__init__(beam_tr_shear_stress._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( beam_tr_shear_stress._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/cgns_result_provider.py b/src/ansys/dpf/core/operators/result/cgns_result_provider.py index 6b586589a88..e18416e40f6 100644 --- a/src/ansys/dpf/core/operators/result/cgns_result_provider.py +++ b/src/ansys/dpf/core/operators/result/cgns_result_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class cgns_result_provider(Operator): r"""Read/compute names result from result streams. @@ -231,27 +239,33 @@ class InputsCgnsResultProvider(_Inputs): def __init__(self, op: Operator): super().__init__(cgns_result_provider._spec().inputs, op) - self._time_scoping = Input(cgns_result_provider._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping] = Input( + cgns_result_provider._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cgns_result_provider._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping | ScopingsContainer] = Input( + cgns_result_provider._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( cgns_result_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(cgns_result_provider._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + cgns_result_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._result_name = Input( + self._result_name: Input[str] = Input( cgns_result_provider._spec().input_pin(17), 17, op, -1 ) self._inputs.append(self._result_name) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( cgns_result_provider._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. 
time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output @@ -272,7 +286,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains @@ -293,7 +307,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -314,7 +328,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -335,7 +349,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def result_name(self) -> Input: + def result_name(self) -> Input[str]: r"""Allows to connect result_name input to the operator. name of the result to read. By default the name of the operator is taken. @@ -356,7 +370,7 @@ def result_name(self) -> Input: return self._result_name @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. Optional zone name/Id of the mesh. @@ -391,11 +405,13 @@ class OutputsCgnsResultProvider(_Outputs): def __init__(self, op: Operator): super().__init__(cgns_result_provider._spec().outputs, op) - self._fields = Output(cgns_result_provider._spec().output_pin(0), 0, op) + self._fields: Output[FieldsContainer] = Output( + cgns_result_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields) @property - def fields(self) -> Output: + def fields(self) -> Output[FieldsContainer]: r"""Allows to get fields output of the operator Results diff --git a/src/ansys/dpf/core/operators/result/cms_dst_table_provider.py b/src/ansys/dpf/core/operators/result/cms_dst_table_provider.py index f509984a484..335c52c706a 100644 --- a/src/ansys/dpf/core/operators/result/cms_dst_table_provider.py +++ b/src/ansys/dpf/core/operators/result/cms_dst_table_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.property_field import PropertyField + class cms_dst_table_provider(Operator): r"""Read CST table from a subfile. 
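Illustrative sketch for the cgns_result_provider pins shown above; the file path and result name are placeholders:

from ansys.dpf import core as dpf

op = dpf.operators.result.cgns_result_provider()
op.inputs.data_sources.connect(dpf.DataSources(r"path/to/file.cgns"))  # placeholder path
op.inputs.result_name.connect("velocity")                              # placeholder result name
fields = op.outputs.fields()                                           # FieldsContainer, as typed above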
@@ -142,13 +147,13 @@ class InputsCmsDstTableProvider(_Inputs): def __init__(self, op: Operator): super().__init__(cms_dst_table_provider._spec().inputs, op) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( cms_dst_table_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Data_sources (must contain at least one subfile). @@ -183,11 +188,13 @@ class OutputsCmsDstTableProvider(_Outputs): def __init__(self, op: Operator): super().__init__(cms_dst_table_provider._spec().outputs, op) - self._dst_table = Output(cms_dst_table_provider._spec().output_pin(0), 0, op) + self._dst_table: Output[PropertyField] = Output( + cms_dst_table_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._dst_table) @property - def dst_table(self) -> Output: + def dst_table(self) -> Output[PropertyField]: r"""Allows to get dst_table output of the operator returns integer values of the dst table diff --git a/src/ansys/dpf/core/operators/result/cms_matrices_provider.py b/src/ansys/dpf/core/operators/result/cms_matrices_provider.py index f46f257d3e1..18726856a3d 100644 --- a/src/ansys/dpf/core/operators/result/cms_matrices_provider.py +++ b/src/ansys/dpf/core/operators/result/cms_matrices_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + class cms_matrices_provider(Operator): r"""Read reduced matrices for cms elements. Extract stiffness, damping, mass @@ -159,17 +164,17 @@ class InputsCmsMatricesProvider(_Inputs): def __init__(self, op: Operator): super().__init__(cms_matrices_provider._spec().inputs, op) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( cms_matrices_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._matrix_form = Input( + self._matrix_form: Input[bool] = Input( cms_matrices_provider._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._matrix_form) @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Data_sources (must contain at list one subfile). @@ -190,7 +195,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def matrix_form(self) -> Input: + def matrix_form(self) -> Input[bool]: r"""Allows to connect matrix_form input to the operator. If this pin i set to true, data are return as matrix form. @@ -225,13 +230,13 @@ class OutputsCmsMatricesProvider(_Outputs): def __init__(self, op: Operator): super().__init__(cms_matrices_provider._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( cms_matrices_provider._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Fields container containing in this order : stiffness, damping, mass matrices, and then load vector. 
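Illustrative sketch for cms_matrices_provider; the subfile path is a placeholder and the indexing follows the container order documented above:

from ansys.dpf import core as dpf

op = dpf.operators.result.cms_matrices_provider()
op.inputs.data_sources.connect(dpf.DataSources(r"path/to/file.sub"))  # placeholder subfile path
fc = op.outputs.fields_container()
stiffness = fc[0]  # fc[1] damping, fc[2] mass, fc[3] load vector, per the documented order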
But if pin 200 is set to true, it's in matrix form. diff --git a/src/ansys/dpf/core/operators/result/cms_subfile_info_provider.py b/src/ansys/dpf/core/operators/result/cms_subfile_info_provider.py index 28891ba77b9..30c6a1286bf 100644 --- a/src/ansys/dpf/core/operators/result/cms_subfile_info_provider.py +++ b/src/ansys/dpf/core/operators/result/cms_subfile_info_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.property_field import PropertyField + class cms_subfile_info_provider(Operator): r"""Read required information from a subfile. @@ -190,21 +195,21 @@ class InputsCmsSubfileInfoProvider(_Inputs): def __init__(self, op: Operator): super().__init__(cms_subfile_info_provider._spec().inputs, op) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( cms_subfile_info_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._cms_subfile_data = Input( + self._cms_subfile_data: Input[bool] = Input( cms_subfile_info_provider._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._cms_subfile_data) - self._output_maxdof_on_masternodes = Input( + self._output_maxdof_on_masternodes: Input[bool] = Input( cms_subfile_info_provider._spec().input_pin(300), 300, op, -1 ) self._inputs.append(self._output_maxdof_on_masternodes) @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Data_sources (must contain at least one subfile). @@ -225,7 +230,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def cms_subfile_data(self) -> Input: + def cms_subfile_data(self) -> Input[bool]: r"""Allows to connect cms_subfile_data input to the operator. If this pin i set to true, data are return in a field. @@ -246,7 +251,7 @@ def cms_subfile_data(self) -> Input: return self._cms_subfile_data @property - def output_maxdof_on_masternodes(self) -> Input: + def output_maxdof_on_masternodes(self) -> Input[bool]: r"""Allows to connect output_maxdof_on_masternodes input to the operator. 
If this pin is set to true, compute and add field with max degrees of freedom on master nodes @@ -282,13 +287,17 @@ class OutputsCmsSubfileInfoProvider(_Outputs): def __init__(self, op: Operator): super().__init__(cms_subfile_info_provider._spec().outputs, op) - self._int32 = Output(cms_subfile_info_provider._spec().output_pin(0), 0, op) + self._int32: Output[int] = Output( + cms_subfile_info_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._int32) - self._field = Output(cms_subfile_info_provider._spec().output_pin(1), 1, op) + self._field: Output[PropertyField] = Output( + cms_subfile_info_provider._spec().output_pin(1), 1, op + ) self._outputs.append(self._field) @property - def int32(self) -> Output: + def int32(self) -> Output[int]: r"""Allows to get int32 output of the operator returns integer values in the order : unit system used, stiffness matrix present key, damping matrix present key, mass matrix present key, number of master nodes, number of virtual nodes @@ -308,7 +317,7 @@ def int32(self) -> Output: return self._int32 @property - def field(self) -> Output: + def field(self) -> Output[PropertyField]: r"""Allows to get field output of the operator returns integer values in the order : number of load vectors (nvects), number of nodes (nnod), number of virtual nodes (nvnodes), number of modes (nvmodes) diff --git a/src/ansys/dpf/core/operators/result/co_energy.py b/src/ansys/dpf/core/operators/result/co_energy.py index c7b7f0714f9..a400c2db0ba 100644 --- a/src/ansys/dpf/core/operators/result/co_energy.py +++ b/src/ansys/dpf/core/operators/result/co_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class co_energy(Operator): r"""Read/compute co-energy (magnetics) by calling the readers defined by the @@ -251,23 +262,37 @@ class InputsCoEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(co_energy._spec().inputs, op) - self._time_scoping = Input(co_energy._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + co_energy._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(co_energy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + co_energy._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(co_energy._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + co_energy._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(co_energy._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + co_energy._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = 
Input(co_energy._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + co_energy._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(co_energy._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + co_energy._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(co_energy._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + co_energy._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -288,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -309,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -330,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -351,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -372,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. 
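Illustrative sketch for co_energy with the default global rotation; the result file path is a placeholder:

from ansys.dpf import core as dpf

op = dpf.operators.result.co_energy(data_sources=dpf.DataSources(r"path/to/file.rst"))  # placeholder path
op.inputs.bool_rotate_to_global.connect(True)  # default; see the caution above before passing False
fc = op.outputs.fields_container()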
@@ -393,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -428,11 +453,13 @@ class OutputsCoEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(co_energy._spec().outputs, op) - self._fields_container = Output(co_energy._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + co_energy._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/compute_invariant_terms_motion.py b/src/ansys/dpf/core/operators/result/compute_invariant_terms_motion.py index fd10bbe836e..cc72e9e9228 100644 --- a/src/ansys/dpf/core/operators/result/compute_invariant_terms_motion.py +++ b/src/ansys/dpf/core/operators/result/compute_invariant_terms_motion.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.property_field import PropertyField + class compute_invariant_terms_motion(Operator): r"""Set the required data for the invariant terms computation (reduced @@ -379,37 +385,37 @@ class InputsComputeInvariantTermsMotion(_Inputs): def __init__(self, op: Operator): super().__init__(compute_invariant_terms_motion._spec().inputs, op) - self._rom_matrices = Input( + self._rom_matrices: Input[FieldsContainer] = Input( compute_invariant_terms_motion._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._rom_matrices) - self._mode_shapes = Input( + self._mode_shapes: Input[FieldsContainer] = Input( compute_invariant_terms_motion._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mode_shapes) - self._lumped_mass = Input( + self._lumped_mass: Input[FieldsContainer] = Input( compute_invariant_terms_motion._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._lumped_mass) - self._model_data = Input( + self._model_data: Input[FieldsContainer] = Input( compute_invariant_terms_motion._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._model_data) - self._field_coordinates = Input( + self._field_coordinates: Input[Field] = Input( compute_invariant_terms_motion._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._field_coordinates) - self._nod = Input( + self._nod: Input = Input( compute_invariant_terms_motion._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._nod) - self._phi_ortho = Input( + self._phi_ortho: Input[FieldsContainer] = Input( compute_invariant_terms_motion._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._phi_ortho) @property - def rom_matrices(self) -> Input: + def rom_matrices(self) -> Input[FieldsContainer]: r"""Allows to connect rom_matrices input to the operator. 
FieldsContainers containing the reduced matrices @@ -430,7 +436,7 @@ def rom_matrices(self) -> Input: return self._rom_matrices @property - def mode_shapes(self) -> Input: + def mode_shapes(self) -> Input[FieldsContainer]: r"""Allows to connect mode_shapes input to the operator. FieldsContainers containing the mode shapes, which are CST and NOR for the cms method @@ -451,7 +457,7 @@ def mode_shapes(self) -> Input: return self._mode_shapes @property - def lumped_mass(self) -> Input: + def lumped_mass(self) -> Input[FieldsContainer]: r"""Allows to connect lumped_mass input to the operator. FieldsContainers containing the lumped mass @@ -472,7 +478,7 @@ def lumped_mass(self) -> Input: return self._lumped_mass @property - def model_data(self) -> Input: + def model_data(self) -> Input[FieldsContainer]: r"""Allows to connect model_data input to the operator. data describing the finite element model @@ -493,7 +499,7 @@ def model_data(self) -> Input: return self._model_data @property - def field_coordinates(self) -> Input: + def field_coordinates(self) -> Input[Field]: r"""Allows to connect field_coordinates input to the operator. coordinates of all nodes @@ -533,7 +539,7 @@ def nod(self) -> Input: return self._nod @property - def phi_ortho(self) -> Input: + def phi_ortho(self) -> Input[FieldsContainer]: r"""Allows to connect phi_ortho input to the operator. Orthonormalizated modes transformation @@ -584,75 +590,77 @@ class OutputsComputeInvariantTermsMotion(_Outputs): def __init__(self, op: Operator): super().__init__(compute_invariant_terms_motion._spec().outputs, op) - self._model_data = Output( + self._model_data: Output[PropertyField] = Output( compute_invariant_terms_motion._spec().output_pin(0), 0, op ) self._outputs.append(self._model_data) - self._mode_shapes = Output( + self._mode_shapes: Output[FieldsContainer] = Output( compute_invariant_terms_motion._spec().output_pin(1), 1, op ) self._outputs.append(self._mode_shapes) - self._lumped_mass = Output( + self._lumped_mass: Output[FieldsContainer] = Output( compute_invariant_terms_motion._spec().output_pin(2), 2, op ) self._outputs.append(self._lumped_mass) - self._field_coordinates_and_euler_angles = Output( + self._field_coordinates_and_euler_angles: Output[FieldsContainer] = Output( compute_invariant_terms_motion._spec().output_pin(3), 3, op ) self._outputs.append(self._field_coordinates_and_euler_angles) - self._nod = Output(compute_invariant_terms_motion._spec().output_pin(4), 4, op) + self._nod: Output = Output( + compute_invariant_terms_motion._spec().output_pin(4), 4, op + ) self._outputs.append(self._nod) - self._used_node_index = Output( + self._used_node_index: Output = Output( compute_invariant_terms_motion._spec().output_pin(5), 5, op ) self._outputs.append(self._used_node_index) - self._eigenvalue = Output( + self._eigenvalue: Output = Output( compute_invariant_terms_motion._spec().output_pin(6), 6, op ) self._outputs.append(self._eigenvalue) - self._translational_mode_shape = Output( + self._translational_mode_shape: Output = Output( compute_invariant_terms_motion._spec().output_pin(7), 7, op ) self._outputs.append(self._translational_mode_shape) - self._rotational_mode_shape = Output( + self._rotational_mode_shape: Output = Output( compute_invariant_terms_motion._spec().output_pin(8), 8, op ) self._outputs.append(self._rotational_mode_shape) - self._invrt_1 = Output( + self._invrt_1: Output[float] = Output( compute_invariant_terms_motion._spec().output_pin(9), 9, op ) self._outputs.append(self._invrt_1) - self._invrt_2 = 
Output( + self._invrt_2: Output = Output( compute_invariant_terms_motion._spec().output_pin(10), 10, op ) self._outputs.append(self._invrt_2) - self._invrt_3 = Output( + self._invrt_3: Output = Output( compute_invariant_terms_motion._spec().output_pin(11), 11, op ) self._outputs.append(self._invrt_3) - self._invrt_4 = Output( + self._invrt_4: Output = Output( compute_invariant_terms_motion._spec().output_pin(12), 12, op ) self._outputs.append(self._invrt_4) - self._invrt_5 = Output( + self._invrt_5: Output = Output( compute_invariant_terms_motion._spec().output_pin(13), 13, op ) self._outputs.append(self._invrt_5) - self._invrt_6 = Output( + self._invrt_6: Output = Output( compute_invariant_terms_motion._spec().output_pin(14), 14, op ) self._outputs.append(self._invrt_6) - self._invrt_7 = Output( + self._invrt_7: Output = Output( compute_invariant_terms_motion._spec().output_pin(15), 15, op ) self._outputs.append(self._invrt_7) - self._invrt_8 = Output( + self._invrt_8: Output = Output( compute_invariant_terms_motion._spec().output_pin(16), 16, op ) self._outputs.append(self._invrt_8) @property - def model_data(self) -> Output: + def model_data(self) -> Output[PropertyField]: r"""Allows to get model_data output of the operator data describing the finite element model @@ -672,7 +680,7 @@ def model_data(self) -> Output: return self._model_data @property - def mode_shapes(self) -> Output: + def mode_shapes(self) -> Output[FieldsContainer]: r"""Allows to get mode_shapes output of the operator FieldsContainers containing the mode shapes, which are CST and NOR for the cms method @@ -692,7 +700,7 @@ def mode_shapes(self) -> Output: return self._mode_shapes @property - def lumped_mass(self) -> Output: + def lumped_mass(self) -> Output[FieldsContainer]: r"""Allows to get lumped_mass output of the operator FieldsContainers containing the lumped mass @@ -712,7 +720,7 @@ def lumped_mass(self) -> Output: return self._lumped_mass @property - def field_coordinates_and_euler_angles(self) -> Output: + def field_coordinates_and_euler_angles(self) -> Output[FieldsContainer]: r"""Allows to get field_coordinates_and_euler_angles output of the operator coordinates and euler angles of all nodes @@ -822,7 +830,7 @@ def rotational_mode_shape(self) -> Output: return self._rotational_mode_shape @property - def invrt_1(self) -> Output: + def invrt_1(self) -> Output[float]: r"""Allows to get invrt_1 output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/compute_invariant_terms_rbd.py b/src/ansys/dpf/core/operators/result/compute_invariant_terms_rbd.py index 297f83988d1..d873ee7f129 100644 --- a/src/ansys/dpf/core/operators/result/compute_invariant_terms_rbd.py +++ b/src/ansys/dpf/core/operators/result/compute_invariant_terms_rbd.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.property_field import PropertyField + class compute_invariant_terms_rbd(Operator): r"""Set the required data for the invariant terms computation (reduced @@ -474,47 +480,49 @@ class InputsComputeInvariantTermsRbd(_Inputs): def __init__(self, op: Operator): super().__init__(compute_invariant_terms_rbd._spec().inputs, op) - self._rom_matrices = Input( + 
self._rom_matrices: Input[FieldsContainer] = Input( compute_invariant_terms_rbd._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._rom_matrices) - self._mode_shapes = Input( + self._mode_shapes: Input[FieldsContainer] = Input( compute_invariant_terms_rbd._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mode_shapes) - self._lumped_mass = Input( + self._lumped_mass: Input[FieldsContainer] = Input( compute_invariant_terms_rbd._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._lumped_mass) - self._model_data = Input( + self._model_data: Input[FieldsContainer] = Input( compute_invariant_terms_rbd._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._model_data) - self._center_of_mass = Input( + self._center_of_mass: Input[FieldsContainer] = Input( compute_invariant_terms_rbd._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._center_of_mass) - self._inertia_relief = Input( + self._inertia_relief: Input[FieldsContainer] = Input( compute_invariant_terms_rbd._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._inertia_relief) - self._model_size = Input( + self._model_size: Input[float] = Input( compute_invariant_terms_rbd._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._model_size) - self._field_coordinates = Input( + self._field_coordinates: Input[Field] = Input( compute_invariant_terms_rbd._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._field_coordinates) - self._nod = Input(compute_invariant_terms_rbd._spec().input_pin(8), 8, op, -1) + self._nod: Input = Input( + compute_invariant_terms_rbd._spec().input_pin(8), 8, op, -1 + ) self._inputs.append(self._nod) - self._constraint_mode_check = Input( + self._constraint_mode_check: Input[bool] = Input( compute_invariant_terms_rbd._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._constraint_mode_check) @property - def rom_matrices(self) -> Input: + def rom_matrices(self) -> Input[FieldsContainer]: r"""Allows to connect rom_matrices input to the operator. FieldsContainers containing the reduced matrices @@ -535,7 +543,7 @@ def rom_matrices(self) -> Input: return self._rom_matrices @property - def mode_shapes(self) -> Input: + def mode_shapes(self) -> Input[FieldsContainer]: r"""Allows to connect mode_shapes input to the operator. FieldsContainers containing the mode shapes, which are CST and NOR for the cms method @@ -556,7 +564,7 @@ def mode_shapes(self) -> Input: return self._mode_shapes @property - def lumped_mass(self) -> Input: + def lumped_mass(self) -> Input[FieldsContainer]: r"""Allows to connect lumped_mass input to the operator. FieldsContainers containing the lumped mass @@ -577,7 +585,7 @@ def lumped_mass(self) -> Input: return self._lumped_mass @property - def model_data(self) -> Input: + def model_data(self) -> Input[FieldsContainer]: r"""Allows to connect model_data input to the operator. data describing the finite element model @@ -598,7 +606,7 @@ def model_data(self) -> Input: return self._model_data @property - def center_of_mass(self) -> Input: + def center_of_mass(self) -> Input[FieldsContainer]: r"""Allows to connect center_of_mass input to the operator. Returns @@ -617,7 +625,7 @@ def center_of_mass(self) -> Input: return self._center_of_mass @property - def inertia_relief(self) -> Input: + def inertia_relief(self) -> Input[FieldsContainer]: r"""Allows to connect inertia_relief input to the operator. 
inertia matrix @@ -638,7 +646,7 @@ def inertia_relief(self) -> Input: return self._inertia_relief @property - def model_size(self) -> Input: + def model_size(self) -> Input[float]: r"""Allows to connect model_size input to the operator. model size @@ -659,7 +667,7 @@ def model_size(self) -> Input: return self._model_size @property - def field_coordinates(self) -> Input: + def field_coordinates(self) -> Input[Field]: r"""Allows to connect field_coordinates input to the operator. coordinates of all nodes @@ -699,7 +707,7 @@ def nod(self) -> Input: return self._nod @property - def constraint_mode_check(self) -> Input: + def constraint_mode_check(self) -> Input[bool]: r"""Allows to connect constraint_mode_check input to the operator. if true, the orthogonality of the constraint modes are checked. Default is false. @@ -756,71 +764,101 @@ class OutputsComputeInvariantTermsRbd(_Outputs): def __init__(self, op: Operator): super().__init__(compute_invariant_terms_rbd._spec().outputs, op) - self._model_data = Output( + self._model_data: Output[PropertyField] = Output( compute_invariant_terms_rbd._spec().output_pin(0), 0, op ) self._outputs.append(self._model_data) - self._center_of_mass = Output( + self._center_of_mass: Output[Field] = Output( compute_invariant_terms_rbd._spec().output_pin(1), 1, op ) self._outputs.append(self._center_of_mass) - self._inertia_relief = Output( + self._inertia_relief: Output[Field] = Output( compute_invariant_terms_rbd._spec().output_pin(2), 2, op ) self._outputs.append(self._inertia_relief) - self._model_size = Output( + self._model_size: Output[PropertyField] = Output( compute_invariant_terms_rbd._spec().output_pin(3), 3, op ) self._outputs.append(self._model_size) - self._master_node_coordinates = Output( + self._master_node_coordinates: Output = Output( compute_invariant_terms_rbd._spec().output_pin(4), 4, op ) self._outputs.append(self._master_node_coordinates) - self._v_trsf = Output(compute_invariant_terms_rbd._spec().output_pin(5), 5, op) + self._v_trsf: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(5), 5, op + ) self._outputs.append(self._v_trsf) - self._k_mat = Output(compute_invariant_terms_rbd._spec().output_pin(6), 6, op) + self._k_mat: Output[Field] = Output( + compute_invariant_terms_rbd._spec().output_pin(6), 6, op + ) self._outputs.append(self._k_mat) - self._mass_mat = Output( + self._mass_mat: Output[Field] = Output( compute_invariant_terms_rbd._spec().output_pin(7), 7, op ) self._outputs.append(self._mass_mat) - self._c_mat = Output(compute_invariant_terms_rbd._spec().output_pin(8), 8, op) + self._c_mat: Output[Field] = Output( + compute_invariant_terms_rbd._spec().output_pin(8), 8, op + ) self._outputs.append(self._c_mat) - self._rhs = Output(compute_invariant_terms_rbd._spec().output_pin(9), 9, op) + self._rhs: Output[Field] = Output( + compute_invariant_terms_rbd._spec().output_pin(9), 9, op + ) self._outputs.append(self._rhs) - self._dn = Output(compute_invariant_terms_rbd._spec().output_pin(10), 10, op) + self._dn: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(10), 10, op + ) self._outputs.append(self._dn) - self._dr_cross_n = Output( + self._dr_cross_n: Output = Output( compute_invariant_terms_rbd._spec().output_pin(11), 11, op ) self._outputs.append(self._dr_cross_n) - self._drn = Output(compute_invariant_terms_rbd._spec().output_pin(12), 12, op) + self._drn: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(12), 12, op + ) self._outputs.append(self._drn) - self._dn_cross_n = Output( + 
self._dn_cross_n: Output = Output( compute_invariant_terms_rbd._spec().output_pin(13), 13, op ) self._outputs.append(self._dn_cross_n) - self._dnx_y = Output(compute_invariant_terms_rbd._spec().output_pin(14), 14, op) + self._dnx_y: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(14), 14, op + ) self._outputs.append(self._dnx_y) - self._dny_y = Output(compute_invariant_terms_rbd._spec().output_pin(15), 15, op) + self._dny_y: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(15), 15, op + ) self._outputs.append(self._dny_y) - self._dnz_y = Output(compute_invariant_terms_rbd._spec().output_pin(16), 16, op) + self._dnz_y: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(16), 16, op + ) self._outputs.append(self._dnz_y) - self._dyx_n = Output(compute_invariant_terms_rbd._spec().output_pin(17), 17, op) + self._dyx_n: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(17), 17, op + ) self._outputs.append(self._dyx_n) - self._dyy_n = Output(compute_invariant_terms_rbd._spec().output_pin(18), 18, op) + self._dyy_n: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(18), 18, op + ) self._outputs.append(self._dyy_n) - self._dyz_n = Output(compute_invariant_terms_rbd._spec().output_pin(19), 19, op) + self._dyz_n: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(19), 19, op + ) self._outputs.append(self._dyz_n) - self._dnxn = Output(compute_invariant_terms_rbd._spec().output_pin(20), 20, op) + self._dnxn: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(20), 20, op + ) self._outputs.append(self._dnxn) - self._dnyn = Output(compute_invariant_terms_rbd._spec().output_pin(21), 21, op) + self._dnyn: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(21), 21, op + ) self._outputs.append(self._dnyn) - self._dnzn = Output(compute_invariant_terms_rbd._spec().output_pin(22), 22, op) + self._dnzn: Output = Output( + compute_invariant_terms_rbd._spec().output_pin(22), 22, op + ) self._outputs.append(self._dnzn) @property - def model_data(self) -> Output: + def model_data(self) -> Output[PropertyField]: r"""Allows to get model_data output of the operator data describing the finite element model @@ -840,7 +878,7 @@ def model_data(self) -> Output: return self._model_data @property - def center_of_mass(self) -> Output: + def center_of_mass(self) -> Output[Field]: r"""Allows to get center_of_mass output of the operator center of mass of the body @@ -860,7 +898,7 @@ def center_of_mass(self) -> Output: return self._center_of_mass @property - def inertia_relief(self) -> Output: + def inertia_relief(self) -> Output[Field]: r"""Allows to get inertia_relief output of the operator inertia matrix @@ -880,7 +918,7 @@ def inertia_relief(self) -> Output: return self._inertia_relief @property - def model_size(self) -> Output: + def model_size(self) -> Output[PropertyField]: r"""Allows to get model_size output of the operator Returns @@ -936,7 +974,7 @@ def v_trsf(self) -> Output: return self._v_trsf @property - def k_mat(self) -> Output: + def k_mat(self) -> Output[Field]: r"""Allows to get k_mat output of the operator Returns @@ -954,7 +992,7 @@ def k_mat(self) -> Output: return self._k_mat @property - def mass_mat(self) -> Output: + def mass_mat(self) -> Output[Field]: r"""Allows to get mass_mat output of the operator Returns @@ -972,7 +1010,7 @@ def mass_mat(self) -> Output: return self._mass_mat @property - def c_mat(self) -> Output: + def c_mat(self) -> Output[Field]: r"""Allows to get c_mat 
output of the operator Returns @@ -990,7 +1028,7 @@ def c_mat(self) -> Output: return self._c_mat @property - def rhs(self) -> Output: + def rhs(self) -> Output[Field]: r"""Allows to get rhs output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/compute_stress.py b/src/ansys/dpf/core/operators/result/compute_stress.py index 2923237c42b..473751fe4ce 100644 --- a/src/ansys/dpf/core/operators/result/compute_stress.py +++ b/src/ansys/dpf/core/operators/result/compute_stress.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_stress(Operator): r"""Computes the stress from an elastic strain field. compute_total_strain @@ -213,19 +221,29 @@ class InputsComputeStress(_Inputs): def __init__(self, op: Operator): super().__init__(compute_stress._spec().inputs, op) - self._scoping = Input(compute_stress._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_stress._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input(compute_stress._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + compute_stress._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(compute_stress._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + compute_stress._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._requested_location = Input(compute_stress._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + compute_stress._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._strain = Input(compute_stress._spec().input_pin(10), 10, op, -1) + self._strain: Input[FieldsContainer | Field] = Input( + compute_stress._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._strain) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -246,7 +264,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Needed to get mesh and material ids. Optional if a data_sources have been connected. @@ -267,7 +285,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Needed to get mesh and material ids. Optional if a streams_container have been connected. @@ -288,7 +306,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
Average the Elemental Nodal result to the requested location. @@ -309,7 +327,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def strain(self) -> Input: + def strain(self) -> Input[FieldsContainer | Field]: r"""Allows to connect strain input to the operator. Field/or fields container containing only the elastic strain field (element nodal). @@ -344,11 +362,13 @@ class OutputsComputeStress(_Outputs): def __init__(self, op: Operator): super().__init__(compute_stress._spec().outputs, op) - self._fields_container = Output(compute_stress._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + compute_stress._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_stress_1.py b/src/ansys/dpf/core/operators/result/compute_stress_1.py index 0fc077a9b2d..c27fa3e261f 100644 --- a/src/ansys/dpf/core/operators/result/compute_stress_1.py +++ b/src/ansys/dpf/core/operators/result/compute_stress_1.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_stress_1(Operator): r"""Computes the stress from an elastic strain field. compute_total_strain @@ -215,23 +223,29 @@ class InputsComputeStress1(_Inputs): def __init__(self, op: Operator): super().__init__(compute_stress_1._spec().inputs, op) - self._scoping = Input(compute_stress_1._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_stress_1._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_stress_1._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(compute_stress_1._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + compute_stress_1._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_stress_1._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._strain = Input(compute_stress_1._spec().input_pin(10), 10, op, -1) + self._strain: Input[FieldsContainer | Field] = Input( + compute_stress_1._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._strain) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -252,7 +266,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
Needed to get mesh and material ids. Optional if a data_sources have been connected. @@ -273,7 +287,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Needed to get mesh and material ids. Optional if a streams_container have been connected. @@ -294,7 +308,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -315,7 +329,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def strain(self) -> Input: + def strain(self) -> Input[FieldsContainer | Field]: r"""Allows to connect strain input to the operator. Field/or fields container containing only the elastic strain field (element nodal). @@ -350,11 +364,13 @@ class OutputsComputeStress1(_Outputs): def __init__(self, op: Operator): super().__init__(compute_stress_1._spec().outputs, op) - self._fields_container = Output(compute_stress_1._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + compute_stress_1._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_stress_2.py b/src/ansys/dpf/core/operators/result/compute_stress_2.py index 93b04b20286..74ae61efcaf 100644 --- a/src/ansys/dpf/core/operators/result/compute_stress_2.py +++ b/src/ansys/dpf/core/operators/result/compute_stress_2.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_stress_2(Operator): r"""Computes the stress from an elastic strain field. 
compute_total_strain @@ -215,23 +223,29 @@ class InputsComputeStress2(_Inputs): def __init__(self, op: Operator): super().__init__(compute_stress_2._spec().inputs, op) - self._scoping = Input(compute_stress_2._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_stress_2._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_stress_2._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(compute_stress_2._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + compute_stress_2._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_stress_2._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._strain = Input(compute_stress_2._spec().input_pin(10), 10, op, -1) + self._strain: Input[FieldsContainer | Field] = Input( + compute_stress_2._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._strain) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -252,7 +266,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Needed to get mesh and material ids. Optional if a data_sources have been connected. @@ -273,7 +287,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Needed to get mesh and material ids. Optional if a streams_container have been connected. @@ -294,7 +308,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -315,7 +329,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def strain(self) -> Input: + def strain(self) -> Input[FieldsContainer | Field]: r"""Allows to connect strain input to the operator. Field/or fields container containing only the elastic strain field (element nodal). @@ -350,11 +364,13 @@ class OutputsComputeStress2(_Outputs): def __init__(self, op: Operator): super().__init__(compute_stress_2._spec().outputs, op) - self._fields_container = Output(compute_stress_2._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + compute_stress_2._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). 
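The compute_stress family above now exposes fully typed pins (Input[Scoping], Input[StreamsContainer], Input[DataSources], Input[str], Input[FieldsContainer | Field]) and a typed Output[FieldsContainer]. A minimal usage sketch of how those pins are driven, assuming a running DPF server and a placeholder result file named "model.rst"; the Model/elastic_strain workflow is only an illustration of the typed connections, not part of this change:

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

model = dpf.Model("model.rst")                  # placeholder result file
strain = model.results.elastic_strain().eval()  # FieldsContainer of elastic strain

op = ops.result.compute_stress()
op.inputs.strain.connect(strain)                             # pin 10: Input[FieldsContainer | Field]
op.inputs.data_sources.connect(model.metadata.data_sources)  # pin 4:  Input[DataSources]
op.inputs.requested_location.connect(dpf.locations.nodal)    # pin 9:  Input[str]

stresses = op.outputs.fields_container()        # evaluates the operator; typed Output[FieldsContainer]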
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_3.py b/src/ansys/dpf/core/operators/result/compute_stress_3.py index b4674c8d385..0f086099af2 100644 --- a/src/ansys/dpf/core/operators/result/compute_stress_3.py +++ b/src/ansys/dpf/core/operators/result/compute_stress_3.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_stress_3(Operator): r"""Computes the stress from an elastic strain field. compute_total_strain @@ -215,23 +223,29 @@ class InputsComputeStress3(_Inputs): def __init__(self, op: Operator): super().__init__(compute_stress_3._spec().inputs, op) - self._scoping = Input(compute_stress_3._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_stress_3._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_stress_3._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(compute_stress_3._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + compute_stress_3._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_stress_3._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._strain = Input(compute_stress_3._spec().input_pin(10), 10, op, -1) + self._strain: Input[FieldsContainer | Field] = Input( + compute_stress_3._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._strain) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -252,7 +266,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Needed to get mesh and material ids. Optional if a data_sources have been connected. @@ -273,7 +287,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Needed to get mesh and material ids. Optional if a streams_container have been connected. @@ -294,7 +308,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -315,7 +329,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def strain(self) -> Input: + def strain(self) -> Input[FieldsContainer | Field]: r"""Allows to connect strain input to the operator. 
Field/or fields container containing only the elastic strain field (element nodal). @@ -350,11 +364,13 @@ class OutputsComputeStress3(_Outputs): def __init__(self, op: Operator): super().__init__(compute_stress_3._spec().outputs, op) - self._fields_container = Output(compute_stress_3._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + compute_stress_3._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_stress_X.py b/src/ansys/dpf/core/operators/result/compute_stress_X.py index 5cdcc5420af..6da17628973 100644 --- a/src/ansys/dpf/core/operators/result/compute_stress_X.py +++ b/src/ansys/dpf/core/operators/result/compute_stress_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_stress_X(Operator): r"""Computes the stress from an elastic strain field. compute_total_strain @@ -215,23 +223,29 @@ class InputsComputeStressX(_Inputs): def __init__(self, op: Operator): super().__init__(compute_stress_X._spec().inputs, op) - self._scoping = Input(compute_stress_X._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_stress_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_stress_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(compute_stress_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + compute_stress_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_stress_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._strain = Input(compute_stress_X._spec().input_pin(10), 10, op, -1) + self._strain: Input[FieldsContainer | Field] = Input( + compute_stress_X._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._strain) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -252,7 +266,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Needed to get mesh and material ids. Optional if a data_sources have been connected. 
@@ -273,7 +287,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Needed to get mesh and material ids. Optional if a streams_container have been connected. @@ -294,7 +308,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -315,7 +329,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def strain(self) -> Input: + def strain(self) -> Input[FieldsContainer | Field]: r"""Allows to connect strain input to the operator. Field/or fields container containing only the elastic strain field (element nodal). @@ -350,11 +364,13 @@ class OutputsComputeStressX(_Outputs): def __init__(self, op: Operator): super().__init__(compute_stress_X._spec().outputs, op) - self._fields_container = Output(compute_stress_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + compute_stress_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_stress_XY.py b/src/ansys/dpf/core/operators/result/compute_stress_XY.py index 259ef1e2e9d..1cf29495859 100644 --- a/src/ansys/dpf/core/operators/result/compute_stress_XY.py +++ b/src/ansys/dpf/core/operators/result/compute_stress_XY.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_stress_XY(Operator): r"""Computes the stress from an elastic strain field. 
compute_total_strain @@ -215,23 +223,29 @@ class InputsComputeStressXy(_Inputs): def __init__(self, op: Operator): super().__init__(compute_stress_XY._spec().inputs, op) - self._scoping = Input(compute_stress_XY._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_stress_XY._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_stress_XY._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(compute_stress_XY._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + compute_stress_XY._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_stress_XY._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._strain = Input(compute_stress_XY._spec().input_pin(10), 10, op, -1) + self._strain: Input[FieldsContainer | Field] = Input( + compute_stress_XY._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._strain) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -252,7 +266,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Needed to get mesh and material ids. Optional if a data_sources have been connected. @@ -273,7 +287,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Needed to get mesh and material ids. Optional if a streams_container have been connected. @@ -294,7 +308,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -315,7 +329,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def strain(self) -> Input: + def strain(self) -> Input[FieldsContainer | Field]: r"""Allows to connect strain input to the operator. Field/or fields container containing only the elastic strain field (element nodal). @@ -350,11 +364,13 @@ class OutputsComputeStressXy(_Outputs): def __init__(self, op: Operator): super().__init__(compute_stress_XY._spec().outputs, op) - self._fields_container = Output(compute_stress_XY._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + compute_stress_XY._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). 
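With the pins parametrized, a static checker can validate connections to the component operators such as compute_stress_XY without a running server. A sketch of what mypy or pyright would be expected to report on the annotations shown above (the snippet is meant for static analysis only, not for execution):

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.compute_stress_XY()
op.inputs.requested_location.connect("Nodal")           # OK: Input[str]
op.inputs.data_sources.connect(dpf.DataSources())       # OK: Input[DataSources]
op.inputs.scoping.connect(dpf.Scoping(ids=[1, 2, 3]))   # OK: Input[Scoping]
op.inputs.data_sources.connect(42)                      # flagged: int is not DataSources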
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_XZ.py b/src/ansys/dpf/core/operators/result/compute_stress_XZ.py index 196b873f724..e216c82af21 100644 --- a/src/ansys/dpf/core/operators/result/compute_stress_XZ.py +++ b/src/ansys/dpf/core/operators/result/compute_stress_XZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_stress_XZ(Operator): r"""Computes the stress from an elastic strain field. compute_total_strain @@ -215,23 +223,29 @@ class InputsComputeStressXz(_Inputs): def __init__(self, op: Operator): super().__init__(compute_stress_XZ._spec().inputs, op) - self._scoping = Input(compute_stress_XZ._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_stress_XZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_stress_XZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(compute_stress_XZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + compute_stress_XZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_stress_XZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._strain = Input(compute_stress_XZ._spec().input_pin(10), 10, op, -1) + self._strain: Input[FieldsContainer | Field] = Input( + compute_stress_XZ._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._strain) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -252,7 +266,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Needed to get mesh and material ids. Optional if a data_sources have been connected. @@ -273,7 +287,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Needed to get mesh and material ids. Optional if a streams_container have been connected. @@ -294,7 +308,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -315,7 +329,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def strain(self) -> Input: + def strain(self) -> Input[FieldsContainer | Field]: r"""Allows to connect strain input to the operator. 
Field/or fields container containing only the elastic strain field (element nodal). @@ -350,11 +364,13 @@ class OutputsComputeStressXz(_Outputs): def __init__(self, op: Operator): super().__init__(compute_stress_XZ._spec().outputs, op) - self._fields_container = Output(compute_stress_XZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + compute_stress_XZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_stress_Y.py b/src/ansys/dpf/core/operators/result/compute_stress_Y.py index 116e9b47ee7..89d37ab9a59 100644 --- a/src/ansys/dpf/core/operators/result/compute_stress_Y.py +++ b/src/ansys/dpf/core/operators/result/compute_stress_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_stress_Y(Operator): r"""Computes the stress from an elastic strain field. compute_total_strain @@ -215,23 +223,29 @@ class InputsComputeStressY(_Inputs): def __init__(self, op: Operator): super().__init__(compute_stress_Y._spec().inputs, op) - self._scoping = Input(compute_stress_Y._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_stress_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_stress_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(compute_stress_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + compute_stress_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_stress_Y._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._strain = Input(compute_stress_Y._spec().input_pin(10), 10, op, -1) + self._strain: Input[FieldsContainer | Field] = Input( + compute_stress_Y._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._strain) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -252,7 +266,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Needed to get mesh and material ids. Optional if a data_sources have been connected. 
@@ -273,7 +287,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Needed to get mesh and material ids. Optional if a streams_container have been connected. @@ -294,7 +308,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -315,7 +329,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def strain(self) -> Input: + def strain(self) -> Input[FieldsContainer | Field]: r"""Allows to connect strain input to the operator. Field/or fields container containing only the elastic strain field (element nodal). @@ -350,11 +364,13 @@ class OutputsComputeStressY(_Outputs): def __init__(self, op: Operator): super().__init__(compute_stress_Y._spec().outputs, op) - self._fields_container = Output(compute_stress_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + compute_stress_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_stress_YZ.py b/src/ansys/dpf/core/operators/result/compute_stress_YZ.py index 2d307a2da17..eef49f2ca86 100644 --- a/src/ansys/dpf/core/operators/result/compute_stress_YZ.py +++ b/src/ansys/dpf/core/operators/result/compute_stress_YZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_stress_YZ(Operator): r"""Computes the stress from an elastic strain field. 
compute_total_strain @@ -215,23 +223,29 @@ class InputsComputeStressYz(_Inputs): def __init__(self, op: Operator): super().__init__(compute_stress_YZ._spec().inputs, op) - self._scoping = Input(compute_stress_YZ._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_stress_YZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_stress_YZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(compute_stress_YZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + compute_stress_YZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_stress_YZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._strain = Input(compute_stress_YZ._spec().input_pin(10), 10, op, -1) + self._strain: Input[FieldsContainer | Field] = Input( + compute_stress_YZ._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._strain) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -252,7 +266,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Needed to get mesh and material ids. Optional if a data_sources have been connected. @@ -273,7 +287,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Needed to get mesh and material ids. Optional if a streams_container have been connected. @@ -294,7 +308,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -315,7 +329,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def strain(self) -> Input: + def strain(self) -> Input[FieldsContainer | Field]: r"""Allows to connect strain input to the operator. Field/or fields container containing only the elastic strain field (element nodal). @@ -350,11 +364,13 @@ class OutputsComputeStressYz(_Outputs): def __init__(self, op: Operator): super().__init__(compute_stress_YZ._spec().outputs, op) - self._fields_container = Output(compute_stress_YZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + compute_stress_YZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). 
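The strain pin on these operators is annotated Input[FieldsContainer | Field], so either a whole container or a single field type-checks, while the result pin is Output[FieldsContainer]. A short sketch of both forms, again against a placeholder result file and following the usual PyDPF-Core calling conventions:

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

model = dpf.Model("model.rst")                     # placeholder result file
strain_fc = model.results.elastic_strain().eval()  # FieldsContainer

op = ops.result.compute_stress_Z()
op.inputs.strain.connect(strain_fc)                # whole FieldsContainer
op.inputs.data_sources.connect(model.metadata.data_sources)
sz = op.outputs.fields_container()                 # Output[FieldsContainer]

op_one = ops.result.compute_stress_Z()
op_one.inputs.strain.connect(strain_fc[0])         # a single Field also satisfies the union
op_one.inputs.data_sources.connect(model.metadata.data_sources)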
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_Z.py b/src/ansys/dpf/core/operators/result/compute_stress_Z.py index 30376fbb496..d607705c246 100644 --- a/src/ansys/dpf/core/operators/result/compute_stress_Z.py +++ b/src/ansys/dpf/core/operators/result/compute_stress_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_stress_Z(Operator): r"""Computes the stress from an elastic strain field. compute_total_strain @@ -215,23 +223,29 @@ class InputsComputeStressZ(_Inputs): def __init__(self, op: Operator): super().__init__(compute_stress_Z._spec().inputs, op) - self._scoping = Input(compute_stress_Z._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_stress_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_stress_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(compute_stress_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + compute_stress_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_stress_Z._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._strain = Input(compute_stress_Z._spec().input_pin(10), 10, op, -1) + self._strain: Input[FieldsContainer | Field] = Input( + compute_stress_Z._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._strain) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -252,7 +266,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Needed to get mesh and material ids. Optional if a data_sources have been connected. @@ -273,7 +287,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Needed to get mesh and material ids. Optional if a streams_container have been connected. @@ -294,7 +308,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -315,7 +329,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def strain(self) -> Input: + def strain(self) -> Input[FieldsContainer | Field]: r"""Allows to connect strain input to the operator. 
Field/or fields container containing only the elastic strain field (element nodal). @@ -350,11 +364,13 @@ class OutputsComputeStressZ(_Outputs): def __init__(self, op: Operator): super().__init__(compute_stress_Z._spec().outputs, op) - self._fields_container = Output(compute_stress_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + compute_stress_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_stress_von_mises.py b/src/ansys/dpf/core/operators/result/compute_stress_von_mises.py index 5d9f1cb920e..cbbf10d9e94 100644 --- a/src/ansys/dpf/core/operators/result/compute_stress_von_mises.py +++ b/src/ansys/dpf/core/operators/result/compute_stress_von_mises.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_stress_von_mises(Operator): r"""Computes the stress from an elastic strain field. compute_total_strain @@ -215,25 +223,29 @@ class InputsComputeStressVonMises(_Inputs): def __init__(self, op: Operator): super().__init__(compute_stress_von_mises._spec().inputs, op) - self._scoping = Input(compute_stress_von_mises._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_stress_von_mises._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_stress_von_mises._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( compute_stress_von_mises._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_stress_von_mises._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._strain = Input(compute_stress_von_mises._spec().input_pin(10), 10, op, -1) + self._strain: Input[FieldsContainer | Field] = Input( + compute_stress_von_mises._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._strain) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -254,7 +266,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Needed to get mesh and material ids. Optional if a data_sources have been connected. 
@@ -275,7 +287,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Needed to get mesh and material ids. Optional if a streams_container have been connected. @@ -296,7 +308,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -317,7 +329,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def strain(self) -> Input: + def strain(self) -> Input[FieldsContainer | Field]: r"""Allows to connect strain input to the operator. Field/or fields container containing only the elastic strain field (element nodal). @@ -352,13 +364,13 @@ class OutputsComputeStressVonMises(_Outputs): def __init__(self, op: Operator): super().__init__(compute_stress_von_mises._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( compute_stress_von_mises._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain.py b/src/ansys/dpf/core/operators/result/compute_total_strain.py index ec9ce379071..ec636842884 100644 --- a/src/ansys/dpf/core/operators/result/compute_total_strain.py +++ b/src/ansys/dpf/core/operators/result/compute_total_strain.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_total_strain(Operator): r"""Computes the strain from a displacement field. 
Only SOLID185 (B-Bar, @@ -298,35 +307,45 @@ class InputsComputeTotalStrain(_Inputs): def __init__(self, op: Operator): super().__init__(compute_total_strain._spec().inputs, op) - self._time_scoping = Input(compute_total_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + compute_total_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._scoping = Input(compute_total_strain._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_total_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_total_strain._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(compute_total_strain._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + compute_total_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._extrapolate = Input(compute_total_strain._spec().input_pin(5), 5, op, -1) + self._extrapolate: Input[int] = Input( + compute_total_strain._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._extrapolate) - self._nonlinear = Input(compute_total_strain._spec().input_pin(6), 6, op, -1) + self._nonlinear: Input[int] = Input( + compute_total_strain._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._nonlinear) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( compute_total_strain._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_total_strain._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._displacement = Input( + self._displacement: Input[FieldsContainer | Field] = Input( compute_total_strain._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._displacement) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1.. Will only be used if no displacement input is given (will be applied on displacement operator). @@ -347,7 +366,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -368,7 +387,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected. 
@@ -389,7 +408,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected. @@ -410,7 +429,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def extrapolate(self) -> Input: + def extrapolate(self) -> Input[int]: r"""Allows to connect extrapolate input to the operator. Whether to extrapolate the data from the integration points to the nodes. @@ -431,7 +450,7 @@ def extrapolate(self) -> Input: return self._extrapolate @property - def nonlinear(self) -> Input: + def nonlinear(self) -> Input[int]: r"""Allows to connect nonlinear input to the operator. Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity). @@ -452,7 +471,7 @@ def nonlinear(self) -> Input: return self._nonlinear @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support. @@ -473,7 +492,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -494,7 +513,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer | Field]: r"""Allows to connect displacement input to the operator. Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources. @@ -529,13 +548,13 @@ class OutputsComputeTotalStrain(_Outputs): def __init__(self, op: Operator): super().__init__(compute_total_strain._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( compute_total_strain._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). 
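The same three ingredients repeat for every generated operator below: type-only imports guarded by TYPE_CHECKING, PEP 604 unions under `from __future__ import annotations`, and pins annotated as Input[...]/Output[...]. A minimal self-contained sketch of why this is runtime-neutral (the Pin class here is illustrative, not a DPF class):

    from __future__ import annotations

    from typing import TYPE_CHECKING, Generic, TypeVar

    if TYPE_CHECKING:
        # Imported for static analysis only; never executed at runtime, so it
        # adds no import cost and cannot create import cycles.
        from pathlib import Path

    T = TypeVar("T")


    class Pin(Generic[T]):
        # Illustrative stand-in for Input[T]: it just records what is connected.
        def __init__(self) -> None:
            self._value: T | None = None

        def connect(self, value: T) -> None:
            self._value = value


    # Because of `from __future__ import annotations`, the annotation below is a
    # string at runtime and is never evaluated, so the guarded import of Path is
    # sufficient for type checkers such as mypy or pyright.
    path_or_index: Pin[Path | int] = Pin()
    path_or_index.connect(0)  # accepted: int is part of the union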
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_1.py b/src/ansys/dpf/core/operators/result/compute_total_strain_1.py index 8cada982a2b..18cb56ebea9 100644 --- a/src/ansys/dpf/core/operators/result/compute_total_strain_1.py +++ b/src/ansys/dpf/core/operators/result/compute_total_strain_1.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_total_strain_1(Operator): r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar, @@ -298,41 +307,45 @@ class InputsComputeTotalStrain1(_Inputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_1._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( compute_total_strain_1._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._scoping = Input(compute_total_strain_1._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_total_strain_1._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_total_strain_1._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( compute_total_strain_1._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._extrapolate = Input( + self._extrapolate: Input[int] = Input( compute_total_strain_1._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._extrapolate) - self._nonlinear = Input(compute_total_strain_1._spec().input_pin(6), 6, op, -1) + self._nonlinear: Input[int] = Input( + compute_total_strain_1._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._nonlinear) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( compute_total_strain_1._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_total_strain_1._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._displacement = Input( + self._displacement: Input[FieldsContainer | Field] = Input( compute_total_strain_1._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._displacement) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". 
Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1.. Will only be used if no displacement input is given (will be applied on displacement operator). @@ -353,7 +366,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -374,7 +387,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected. @@ -395,7 +408,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected. @@ -416,7 +429,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def extrapolate(self) -> Input: + def extrapolate(self) -> Input[int]: r"""Allows to connect extrapolate input to the operator. Whether to extrapolate the data from the integration points to the nodes. @@ -437,7 +450,7 @@ def extrapolate(self) -> Input: return self._extrapolate @property - def nonlinear(self) -> Input: + def nonlinear(self) -> Input[int]: r"""Allows to connect nonlinear input to the operator. Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity). @@ -458,7 +471,7 @@ def nonlinear(self) -> Input: return self._nonlinear @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support. @@ -479,7 +492,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -500,7 +513,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer | Field]: r"""Allows to connect displacement input to the operator. Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources. 
@@ -535,13 +548,13 @@ class OutputsComputeTotalStrain1(_Outputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_1._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( compute_total_strain_1._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_2.py b/src/ansys/dpf/core/operators/result/compute_total_strain_2.py index c28a277b5ef..6bda5755f61 100644 --- a/src/ansys/dpf/core/operators/result/compute_total_strain_2.py +++ b/src/ansys/dpf/core/operators/result/compute_total_strain_2.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_total_strain_2(Operator): r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar, @@ -298,41 +307,45 @@ class InputsComputeTotalStrain2(_Inputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_2._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( compute_total_strain_2._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._scoping = Input(compute_total_strain_2._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_total_strain_2._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_total_strain_2._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( compute_total_strain_2._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._extrapolate = Input( + self._extrapolate: Input[int] = Input( compute_total_strain_2._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._extrapolate) - self._nonlinear = Input(compute_total_strain_2._spec().input_pin(6), 6, op, -1) + self._nonlinear: Input[int] = Input( + compute_total_strain_2._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._nonlinear) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( compute_total_strain_2._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_total_strain_2._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._displacement = Input( + self._displacement: Input[FieldsContainer | Field] = Input( compute_total_strain_2._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._displacement) @property - def 
time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1.. Will only be used if no displacement input is given (will be applied on displacement operator). @@ -353,7 +366,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -374,7 +387,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected. @@ -395,7 +408,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected. @@ -416,7 +429,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def extrapolate(self) -> Input: + def extrapolate(self) -> Input[int]: r"""Allows to connect extrapolate input to the operator. Whether to extrapolate the data from the integration points to the nodes. @@ -437,7 +450,7 @@ def extrapolate(self) -> Input: return self._extrapolate @property - def nonlinear(self) -> Input: + def nonlinear(self) -> Input[int]: r"""Allows to connect nonlinear input to the operator. Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity). @@ -458,7 +471,7 @@ def nonlinear(self) -> Input: return self._nonlinear @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support. @@ -479,7 +492,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -500,7 +513,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer | Field]: r"""Allows to connect displacement input to the operator. Field/or fields container containing only the displacement field (nodal). 
If none specified, read displacements from result file using the data_sources. @@ -535,13 +548,13 @@ class OutputsComputeTotalStrain2(_Outputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_2._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( compute_total_strain_2._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_3.py b/src/ansys/dpf/core/operators/result/compute_total_strain_3.py index 63375edeb38..9a00f48e6b7 100644 --- a/src/ansys/dpf/core/operators/result/compute_total_strain_3.py +++ b/src/ansys/dpf/core/operators/result/compute_total_strain_3.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_total_strain_3(Operator): r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar, @@ -298,41 +307,45 @@ class InputsComputeTotalStrain3(_Inputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_3._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( compute_total_strain_3._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._scoping = Input(compute_total_strain_3._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_total_strain_3._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_total_strain_3._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( compute_total_strain_3._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._extrapolate = Input( + self._extrapolate: Input[int] = Input( compute_total_strain_3._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._extrapolate) - self._nonlinear = Input(compute_total_strain_3._spec().input_pin(6), 6, op, -1) + self._nonlinear: Input[int] = Input( + compute_total_strain_3._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._nonlinear) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( compute_total_strain_3._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_total_strain_3._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._displacement = Input( + self._displacement: Input[FieldsContainer | Field] = Input( 
compute_total_strain_3._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._displacement) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1.. Will only be used if no displacement input is given (will be applied on displacement operator). @@ -353,7 +366,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -374,7 +387,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected. @@ -395,7 +408,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected. @@ -416,7 +429,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def extrapolate(self) -> Input: + def extrapolate(self) -> Input[int]: r"""Allows to connect extrapolate input to the operator. Whether to extrapolate the data from the integration points to the nodes. @@ -437,7 +450,7 @@ def extrapolate(self) -> Input: return self._extrapolate @property - def nonlinear(self) -> Input: + def nonlinear(self) -> Input[int]: r"""Allows to connect nonlinear input to the operator. Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity). @@ -458,7 +471,7 @@ def nonlinear(self) -> Input: return self._nonlinear @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support. @@ -479,7 +492,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -500,7 +513,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer | Field]: r"""Allows to connect displacement input to the operator. 
Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources. @@ -535,13 +548,13 @@ class OutputsComputeTotalStrain3(_Outputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_3._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( compute_total_strain_3._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_X.py b/src/ansys/dpf/core/operators/result/compute_total_strain_X.py index 9b770d80e14..ad700f82ba9 100644 --- a/src/ansys/dpf/core/operators/result/compute_total_strain_X.py +++ b/src/ansys/dpf/core/operators/result/compute_total_strain_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_total_strain_X(Operator): r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar, @@ -298,41 +307,45 @@ class InputsComputeTotalStrainX(_Inputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_X._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( compute_total_strain_X._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._scoping = Input(compute_total_strain_X._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_total_strain_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_total_strain_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( compute_total_strain_X._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._extrapolate = Input( + self._extrapolate: Input[int] = Input( compute_total_strain_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._extrapolate) - self._nonlinear = Input(compute_total_strain_X._spec().input_pin(6), 6, op, -1) + self._nonlinear: Input[int] = Input( + compute_total_strain_X._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._nonlinear) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( compute_total_strain_X._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_total_strain_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._displacement = Input( + self._displacement: 
Input[FieldsContainer | Field] = Input( compute_total_strain_X._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._displacement) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1.. Will only be used if no displacement input is given (will be applied on displacement operator). @@ -353,7 +366,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -374,7 +387,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected. @@ -395,7 +408,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected. @@ -416,7 +429,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def extrapolate(self) -> Input: + def extrapolate(self) -> Input[int]: r"""Allows to connect extrapolate input to the operator. Whether to extrapolate the data from the integration points to the nodes. @@ -437,7 +450,7 @@ def extrapolate(self) -> Input: return self._extrapolate @property - def nonlinear(self) -> Input: + def nonlinear(self) -> Input[int]: r"""Allows to connect nonlinear input to the operator. Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity). @@ -458,7 +471,7 @@ def nonlinear(self) -> Input: return self._nonlinear @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support. @@ -479,7 +492,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. 
@@ -500,7 +513,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer | Field]: r"""Allows to connect displacement input to the operator. Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources. @@ -535,13 +548,13 @@ class OutputsComputeTotalStrainX(_Outputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_X._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( compute_total_strain_X._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_XY.py b/src/ansys/dpf/core/operators/result/compute_total_strain_XY.py index bd9fec4c60d..17f8918dd46 100644 --- a/src/ansys/dpf/core/operators/result/compute_total_strain_XY.py +++ b/src/ansys/dpf/core/operators/result/compute_total_strain_XY.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_total_strain_XY(Operator): r"""Computes the strain from a displacement field. 
Only SOLID185 (B-Bar, @@ -298,41 +307,45 @@ class InputsComputeTotalStrainXy(_Inputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_XY._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( compute_total_strain_XY._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._scoping = Input(compute_total_strain_XY._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_total_strain_XY._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_total_strain_XY._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( compute_total_strain_XY._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._extrapolate = Input( + self._extrapolate: Input[int] = Input( compute_total_strain_XY._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._extrapolate) - self._nonlinear = Input(compute_total_strain_XY._spec().input_pin(6), 6, op, -1) + self._nonlinear: Input[int] = Input( + compute_total_strain_XY._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._nonlinear) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( compute_total_strain_XY._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_total_strain_XY._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._displacement = Input( + self._displacement: Input[FieldsContainer | Field] = Input( compute_total_strain_XY._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._displacement) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1.. Will only be used if no displacement input is given (will be applied on displacement operator). @@ -353,7 +366,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -374,7 +387,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected. 
@@ -395,7 +408,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected. @@ -416,7 +429,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def extrapolate(self) -> Input: + def extrapolate(self) -> Input[int]: r"""Allows to connect extrapolate input to the operator. Whether to extrapolate the data from the integration points to the nodes. @@ -437,7 +450,7 @@ def extrapolate(self) -> Input: return self._extrapolate @property - def nonlinear(self) -> Input: + def nonlinear(self) -> Input[int]: r"""Allows to connect nonlinear input to the operator. Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity). @@ -458,7 +471,7 @@ def nonlinear(self) -> Input: return self._nonlinear @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support. @@ -479,7 +492,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -500,7 +513,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer | Field]: r"""Allows to connect displacement input to the operator. Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources. @@ -535,13 +548,13 @@ class OutputsComputeTotalStrainXy(_Outputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_XY._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( compute_total_strain_XY._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). 
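For the pins that accept several kinds of values, the union annotations document the accepted alternatives directly in the type. A hedged usage sketch for the time_scoping pin of compute_total_strain (not part of the patch; the file path is a placeholder, and each connection below is valid on its own, with the later one simply replacing the earlier one):

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    op = ops.result.compute_total_strain()
    op.inputs.data_sources.connect(dpf.DataSources(r"path/to/model.rst"))  # placeholder path

    # inputs.time_scoping is now Input[Scoping | int | float | Field], so both
    # of the following connections type-check:
    op.inputs.time_scoping.connect(-1)                       # all time/freq sets
    op.inputs.time_scoping.connect(dpf.Scoping(ids=[1, 2]))  # specific set ids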
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_XZ.py b/src/ansys/dpf/core/operators/result/compute_total_strain_XZ.py index 4b905a0b016..c3557be6dc1 100644 --- a/src/ansys/dpf/core/operators/result/compute_total_strain_XZ.py +++ b/src/ansys/dpf/core/operators/result/compute_total_strain_XZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_total_strain_XZ(Operator): r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar, @@ -298,41 +307,45 @@ class InputsComputeTotalStrainXz(_Inputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_XZ._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( compute_total_strain_XZ._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._scoping = Input(compute_total_strain_XZ._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_total_strain_XZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_total_strain_XZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( compute_total_strain_XZ._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._extrapolate = Input( + self._extrapolate: Input[int] = Input( compute_total_strain_XZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._extrapolate) - self._nonlinear = Input(compute_total_strain_XZ._spec().input_pin(6), 6, op, -1) + self._nonlinear: Input[int] = Input( + compute_total_strain_XZ._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._nonlinear) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( compute_total_strain_XZ._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_total_strain_XZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._displacement = Input( + self._displacement: Input[FieldsContainer | Field] = Input( compute_total_strain_XZ._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._displacement) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". 
Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1.. Will only be used if no displacement input is given (will be applied on displacement operator). @@ -353,7 +366,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -374,7 +387,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected. @@ -395,7 +408,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected. @@ -416,7 +429,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def extrapolate(self) -> Input: + def extrapolate(self) -> Input[int]: r"""Allows to connect extrapolate input to the operator. Whether to extrapolate the data from the integration points to the nodes. @@ -437,7 +450,7 @@ def extrapolate(self) -> Input: return self._extrapolate @property - def nonlinear(self) -> Input: + def nonlinear(self) -> Input[int]: r"""Allows to connect nonlinear input to the operator. Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity). @@ -458,7 +471,7 @@ def nonlinear(self) -> Input: return self._nonlinear @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support. @@ -479,7 +492,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -500,7 +513,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer | Field]: r"""Allows to connect displacement input to the operator. Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources. 
@@ -535,13 +548,13 @@ class OutputsComputeTotalStrainXz(_Outputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_XZ._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( compute_total_strain_XZ._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_Y.py b/src/ansys/dpf/core/operators/result/compute_total_strain_Y.py index ad9f1ea9dbb..4575e2aeafe 100644 --- a/src/ansys/dpf/core/operators/result/compute_total_strain_Y.py +++ b/src/ansys/dpf/core/operators/result/compute_total_strain_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_total_strain_Y(Operator): r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar, @@ -298,41 +307,45 @@ class InputsComputeTotalStrainY(_Inputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_Y._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( compute_total_strain_Y._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._scoping = Input(compute_total_strain_Y._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_total_strain_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_total_strain_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( compute_total_strain_Y._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._extrapolate = Input( + self._extrapolate: Input[int] = Input( compute_total_strain_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._extrapolate) - self._nonlinear = Input(compute_total_strain_Y._spec().input_pin(6), 6, op, -1) + self._nonlinear: Input[int] = Input( + compute_total_strain_Y._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._nonlinear) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( compute_total_strain_Y._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_total_strain_Y._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._displacement = Input( + self._displacement: Input[FieldsContainer | Field] = Input( compute_total_strain_Y._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._displacement) @property - 
def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1.. Will only be used if no displacement input is given (will be applied on displacement operator). @@ -353,7 +366,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -374,7 +387,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected. @@ -395,7 +408,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected. @@ -416,7 +429,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def extrapolate(self) -> Input: + def extrapolate(self) -> Input[int]: r"""Allows to connect extrapolate input to the operator. Whether to extrapolate the data from the integration points to the nodes. @@ -437,7 +450,7 @@ def extrapolate(self) -> Input: return self._extrapolate @property - def nonlinear(self) -> Input: + def nonlinear(self) -> Input[int]: r"""Allows to connect nonlinear input to the operator. Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity). @@ -458,7 +471,7 @@ def nonlinear(self) -> Input: return self._nonlinear @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support. @@ -479,7 +492,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -500,7 +513,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer | Field]: r"""Allows to connect displacement input to the operator. Field/or fields container containing only the displacement field (nodal). 
If none specified, read displacements from result file using the data_sources. @@ -535,13 +548,13 @@ class OutputsComputeTotalStrainY(_Outputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_Y._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( compute_total_strain_Y._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_YZ.py b/src/ansys/dpf/core/operators/result/compute_total_strain_YZ.py index 2bf0051b5a7..e57c3a81731 100644 --- a/src/ansys/dpf/core/operators/result/compute_total_strain_YZ.py +++ b/src/ansys/dpf/core/operators/result/compute_total_strain_YZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_total_strain_YZ(Operator): r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar, @@ -298,41 +307,45 @@ class InputsComputeTotalStrainYz(_Inputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_YZ._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( compute_total_strain_YZ._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._scoping = Input(compute_total_strain_YZ._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_total_strain_YZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_total_strain_YZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( compute_total_strain_YZ._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._extrapolate = Input( + self._extrapolate: Input[int] = Input( compute_total_strain_YZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._extrapolate) - self._nonlinear = Input(compute_total_strain_YZ._spec().input_pin(6), 6, op, -1) + self._nonlinear: Input[int] = Input( + compute_total_strain_YZ._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._nonlinear) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( compute_total_strain_YZ._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_total_strain_YZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._displacement = Input( + self._displacement: Input[FieldsContainer | Field] = Input( 
compute_total_strain_YZ._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._displacement) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1.. Will only be used if no displacement input is given (will be applied on displacement operator). @@ -353,7 +366,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -374,7 +387,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected. @@ -395,7 +408,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected. @@ -416,7 +429,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def extrapolate(self) -> Input: + def extrapolate(self) -> Input[int]: r"""Allows to connect extrapolate input to the operator. Whether to extrapolate the data from the integration points to the nodes. @@ -437,7 +450,7 @@ def extrapolate(self) -> Input: return self._extrapolate @property - def nonlinear(self) -> Input: + def nonlinear(self) -> Input[int]: r"""Allows to connect nonlinear input to the operator. Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity). @@ -458,7 +471,7 @@ def nonlinear(self) -> Input: return self._nonlinear @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support. @@ -479,7 +492,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. @@ -500,7 +513,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer | Field]: r"""Allows to connect displacement input to the operator. 
Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources. @@ -535,13 +548,13 @@ class OutputsComputeTotalStrainYz(_Outputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_YZ._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( compute_total_strain_YZ._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_Z.py b/src/ansys/dpf/core/operators/result/compute_total_strain_Z.py index 4cda111f956..5f3d31306a7 100644 --- a/src/ansys/dpf/core/operators/result/compute_total_strain_Z.py +++ b/src/ansys/dpf/core/operators/result/compute_total_strain_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class compute_total_strain_Z(Operator): r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar, @@ -298,41 +307,45 @@ class InputsComputeTotalStrainZ(_Inputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_Z._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( compute_total_strain_Z._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._scoping = Input(compute_total_strain_Z._spec().input_pin(1), 1, op, -1) + self._scoping: Input[Scoping] = Input( + compute_total_strain_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( compute_total_strain_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( compute_total_strain_Z._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._extrapolate = Input( + self._extrapolate: Input[int] = Input( compute_total_strain_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._extrapolate) - self._nonlinear = Input(compute_total_strain_Z._spec().input_pin(6), 6, op, -1) + self._nonlinear: Input[int] = Input( + compute_total_strain_Z._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._nonlinear) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( compute_total_strain_Z._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._requested_location = Input( + self._requested_location: Input[str] = Input( compute_total_strain_Z._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._displacement = Input( + self._displacement: 
Input[FieldsContainer | Field] = Input( compute_total_strain_Z._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._displacement) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1.. Will only be used if no displacement input is given (will be applied on displacement operator). @@ -353,7 +366,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. The element scoping on which the result is computed. @@ -374,7 +387,7 @@ def scoping(self) -> Input: return self._scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected. @@ -395,7 +408,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected. @@ -416,7 +429,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def extrapolate(self) -> Input: + def extrapolate(self) -> Input[int]: r"""Allows to connect extrapolate input to the operator. Whether to extrapolate the data from the integration points to the nodes. @@ -437,7 +450,7 @@ def extrapolate(self) -> Input: return self._extrapolate @property - def nonlinear(self) -> Input: + def nonlinear(self) -> Input[int]: r"""Allows to connect nonlinear input to the operator. Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity). @@ -458,7 +471,7 @@ def nonlinear(self) -> Input: return self._nonlinear @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support. @@ -479,7 +492,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Average the Elemental Nodal result to the requested location. 
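The same annotations repeat for each pin of the compute_total_strain_* family, so the gain is mainly for static analysis: an IDE or type checker can now see what each pin accepts. A short sketch of the intent, assuming the usual generated-operator call pattern; the checker behaviour described in the comments is the expected one, not something this diff enforces at runtime:

```python
# Illustrative sketch only; not part of the diff.
from ansys.dpf.core import operators as ops

op = ops.result.compute_total_strain_Z()

op.inputs.nonlinear.connect(1)                  # Input[int]: 1 = large strain
op.inputs.extrapolate.connect(1)                # Input[int]
op.inputs.requested_location.connect("Nodal")   # Input[str]
# With the pins parameterized, a call such as
#   op.inputs.nonlinear.connect("large strain")
# is something a static checker can now flag, since connect() takes the pin's T.
```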
@@ -500,7 +513,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def displacement(self) -> Input: + def displacement(self) -> Input[FieldsContainer | Field]: r"""Allows to connect displacement input to the operator. Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources. @@ -535,13 +548,13 @@ class OutputsComputeTotalStrainZ(_Outputs): def __init__(self, op: Operator): super().__init__(compute_total_strain_Z._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( compute_total_strain_Z._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator The computed result fields container (elemental nodal). diff --git a/src/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py b/src/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py index 346a7f2e899..065db44a0a4 100644 --- a/src/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py +++ b/src/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class contact_fluid_penetration_pressure(Operator): r"""Read/compute element actual applied fluid penetration pressure by @@ -619,73 +630,73 @@ class InputsContactFluidPenetrationPressure(_Inputs): def __init__(self, op: Operator): super().__init__(contact_fluid_penetration_pressure._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( contact_fluid_penetration_pressure._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( contact_fluid_penetration_pressure._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( contact_fluid_penetration_pressure._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( contact_fluid_penetration_pressure._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( contact_fluid_penetration_pressure._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( 
contact_fluid_penetration_pressure._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( contact_fluid_penetration_pressure._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( contact_fluid_penetration_pressure._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( contact_fluid_penetration_pressure._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( contact_fluid_penetration_pressure._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( contact_fluid_penetration_pressure._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input( + self._phi: Input[float] = Input( contact_fluid_penetration_pressure._spec().input_pin(19), 19, op, -1 ) self._inputs.append(self._phi) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( contact_fluid_penetration_pressure._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( contact_fluid_penetration_pressure._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( contact_fluid_penetration_pressure._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( contact_fluid_penetration_pressure._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -706,7 +717,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. 
Using scopings container allows you to split the result fields container into domains @@ -727,7 +738,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -748,7 +759,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -769,7 +780,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -790,7 +801,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -811,7 +822,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -832,7 +843,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -853,7 +864,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -874,7 +885,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -895,7 +906,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -916,7 +927,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. 
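For the contact result readers the unions carry most of the information, since several pins accept either a single entity or its container (MeshedRegion vs. MeshesContainer, Scoping vs. ScopingsContainer). A hedged sketch of a cyclic-expansion setup against these typed pins; the file path and sector IDs are placeholders, and the call pattern is the standard generated-operator one rather than anything introduced here:

```python
# Illustrative sketch only; placeholders for the result file and sectors.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.contact_fluid_penetration_pressure()
op.inputs.data_sources.connect(dpf.DataSources(r"D:\results\cyclic.rst"))

op.inputs.read_cyclic.connect(2)                # Input[int]: 2 = do cyclic expansion
op.inputs.sectors_to_expand.connect(dpf.Scoping(ids=[0, 1, 2]))  # Input[Scoping | ScopingsContainer]
op.inputs.phi.connect(0.0)                      # Input[float]
op.inputs.bool_rotate_to_global.connect(True)   # Input[bool]

pressure = op.outputs.fields_container()        # Output[FieldsContainer]
```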
@@ -937,7 +948,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -958,7 +969,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -979,7 +990,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -1000,7 +1011,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -1035,13 +1046,13 @@ class OutputsContactFluidPenetrationPressure(_Outputs): def __init__(self, op: Operator): super().__init__(contact_fluid_penetration_pressure._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( contact_fluid_penetration_pressure._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/contact_friction_stress.py b/src/ansys/dpf/core/operators/result/contact_friction_stress.py index d2f59fa3e0a..2a537cc5dbf 100644 --- a/src/ansys/dpf/core/operators/result/contact_friction_stress.py +++ b/src/ansys/dpf/core/operators/result/contact_friction_stress.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class contact_friction_stress(Operator): r"""Read/compute element contact friction stress by calling the readers @@ -619,69 +630,73 @@ class InputsContactFrictionStress(_Inputs): def __init__(self, op: Operator): super().__init__(contact_friction_stress._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( 
contact_friction_stress._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( contact_friction_stress._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( contact_friction_stress._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( contact_friction_stress._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( contact_friction_stress._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( contact_friction_stress._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_friction_stress._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + contact_friction_stress._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( contact_friction_stress._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( contact_friction_stress._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( contact_friction_stress._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( contact_friction_stress._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(contact_friction_stress._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + contact_friction_stress._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( contact_friction_stress._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( contact_friction_stress._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( contact_friction_stress._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( contact_friction_stress._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". 
Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -702,7 +717,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -723,7 +738,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -744,7 +759,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -765,7 +780,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -786,7 +801,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -807,7 +822,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -828,7 +843,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -849,7 +864,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -870,7 +885,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -891,7 +906,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -912,7 +927,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -933,7 +948,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -954,7 +969,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -975,7 +990,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -996,7 +1011,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -1031,13 +1046,13 @@ class OutputsContactFrictionStress(_Outputs): def __init__(self, op: Operator): super().__init__(contact_friction_stress._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( contact_friction_stress._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/contact_gap_distance.py b/src/ansys/dpf/core/operators/result/contact_gap_distance.py index 0f4ab2663e4..981ea4a0ec4 100644 --- a/src/ansys/dpf/core/operators/result/contact_gap_distance.py +++ b/src/ansys/dpf/core/operators/result/contact_gap_distance.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class contact_gap_distance(Operator): r"""Read/compute element contact gap distance by calling the readers defined @@ -619,61 +630,73 @@ class InputsContactGapDistance(_Inputs): def __init__(self, op: Operator): super().__init__(contact_gap_distance._spec().inputs, op) - self._time_scoping = Input(contact_gap_distance._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + contact_gap_distance._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_gap_distance._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + contact_gap_distance._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( contact_gap_distance._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( contact_gap_distance._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_gap_distance._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + contact_gap_distance._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( contact_gap_distance._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_gap_distance._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + contact_gap_distance._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( contact_gap_distance._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( contact_gap_distance._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( contact_gap_distance._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( contact_gap_distance._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(contact_gap_distance._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + contact_gap_distance._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) - self._read_beams = Input(contact_gap_distance._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + contact_gap_distance._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( contact_gap_distance._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( contact_gap_distance._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( contact_gap_distance._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -694,7 +717,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -715,7 +738,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -736,7 +759,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -757,7 +780,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -778,7 +801,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -799,7 +822,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -820,7 +843,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -841,7 +864,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -862,7 +885,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -883,7 +906,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -904,7 +927,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -925,7 +948,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -946,7 +969,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. 
If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -967,7 +990,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -988,7 +1011,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -1023,13 +1046,13 @@ class OutputsContactGapDistance(_Outputs): def __init__(self, op: Operator): super().__init__(contact_gap_distance._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( contact_gap_distance._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/contact_penetration.py b/src/ansys/dpf/core/operators/result/contact_penetration.py index b2ec7ecd21a..96e4444b463 100644 --- a/src/ansys/dpf/core/operators/result/contact_penetration.py +++ b/src/ansys/dpf/core/operators/result/contact_penetration.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class contact_penetration(Operator): r"""Read/compute element contact penetration by calling the readers defined @@ -619,57 +630,73 @@ class InputsContactPenetration(_Inputs): def __init__(self, op: Operator): super().__init__(contact_penetration._spec().inputs, op) - self._time_scoping = Input(contact_penetration._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + contact_penetration._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_penetration._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + contact_penetration._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( contact_penetration._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( contact_penetration._spec().input_pin(3), 3, op, -1 ) 
self._inputs.append(self._streams_container) - self._data_sources = Input(contact_penetration._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + contact_penetration._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( contact_penetration._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_penetration._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + contact_penetration._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( contact_penetration._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_penetration._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + contact_penetration._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( contact_penetration._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( contact_penetration._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(contact_penetration._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + contact_penetration._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) - self._read_beams = Input(contact_penetration._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + contact_penetration._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( contact_penetration._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input(contact_penetration._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + contact_penetration._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( contact_penetration._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -690,7 +717,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. 
nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -711,7 +738,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -732,7 +759,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -753,7 +780,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -774,7 +801,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -795,7 +822,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -816,7 +843,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -837,7 +864,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -858,7 +885,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -879,7 +906,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. 
sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -900,7 +927,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -921,7 +948,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -942,7 +969,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -963,7 +990,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -984,7 +1011,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -1019,13 +1046,13 @@ class OutputsContactPenetration(_Outputs): def __init__(self, op: Operator): super().__init__(contact_penetration._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( contact_penetration._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/contact_pressure.py b/src/ansys/dpf/core/operators/result/contact_pressure.py index 55f426aa53a..5e940cb6702 100644 --- a/src/ansys/dpf/core/operators/result/contact_pressure.py +++ b/src/ansys/dpf/core/operators/result/contact_pressure.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class contact_pressure(Operator): r"""Read/compute element contact pressure by calling the readers defined by @@ -619,53 +630,73 @@ class InputsContactPressure(_Inputs): def __init__(self, op: Operator): super().__init__(contact_pressure._spec().inputs, op) - self._time_scoping = Input(contact_pressure._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + contact_pressure._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_pressure._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + contact_pressure._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(contact_pressure._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + contact_pressure._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( contact_pressure._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_pressure._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + contact_pressure._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( contact_pressure._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_pressure._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + contact_pressure._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( contact_pressure._spec().input_pin(9), 9, op, -1 ) 
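To illustrate the time_scoping pin documented above, a minimal sketch (placeholder result file path) could either ask for every time/freq set with -1 or pass a Scoping located on "TimeFreq_steps" to select specific step ids, then read the typed fields_container output:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    ds = dpf.DataSources("path/to/file.rst")  # placeholder path

    op = ops.result.contact_pressure()
    op.inputs.data_sources.connect(ds)
    op.inputs.time_scoping.connect(-1)  # -1: all time/freq sets

    # Alternatively, select time/freq step ids with a scoping on "TimeFreq_steps".
    steps = dpf.Scoping(ids=[1, 2], location="TimeFreq_steps")
    op.inputs.time_scoping.connect(steps)

    fc = op.outputs.fields_container()  # evaluates the operator; returns a FieldsContainer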
self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_pressure._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + contact_pressure._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( contact_pressure._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( contact_pressure._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(contact_pressure._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + contact_pressure._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) - self._read_beams = Input(contact_pressure._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + contact_pressure._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(contact_pressure._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + contact_pressure._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(contact_pressure._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + contact_pressure._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( contact_pressure._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -686,7 +717,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -707,7 +738,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Fields container already allocated modified inplace @@ -728,7 +759,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -749,7 +780,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -770,7 +801,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -791,7 +822,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -812,7 +843,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -833,7 +864,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -854,7 +885,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -875,7 +906,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -896,7 +927,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -917,7 +948,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -938,7 +969,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -959,7 +990,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -980,7 +1011,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -1015,11 +1046,13 @@ class OutputsContactPressure(_Outputs): def __init__(self, op: Operator): super().__init__(contact_pressure._spec().outputs, op) - self._fields_container = Output(contact_pressure._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + contact_pressure._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/contact_sliding_distance.py b/src/ansys/dpf/core/operators/result/contact_sliding_distance.py index b9ed6fbbea2..b21ae6289ad 100644 --- a/src/ansys/dpf/core/operators/result/contact_sliding_distance.py +++ b/src/ansys/dpf/core/operators/result/contact_sliding_distance.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class contact_sliding_distance(Operator): r"""Read/compute element contact sliding distance by calling the readers @@ -619,69 +630,73 @@ class InputsContactSlidingDistance(_Inputs): def __init__(self, op: Operator): super().__init__(contact_sliding_distance._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( contact_sliding_distance._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( 
contact_sliding_distance._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( contact_sliding_distance._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( contact_sliding_distance._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( contact_sliding_distance._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( contact_sliding_distance._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_sliding_distance._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + contact_sliding_distance._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( contact_sliding_distance._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( contact_sliding_distance._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( contact_sliding_distance._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( contact_sliding_distance._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(contact_sliding_distance._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + contact_sliding_distance._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( contact_sliding_distance._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( contact_sliding_distance._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( contact_sliding_distance._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( contact_sliding_distance._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. 
To get all data for all time/freq sets, connect an int with value -1. @@ -702,7 +717,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -723,7 +738,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -744,7 +759,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -765,7 +780,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -786,7 +801,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -807,7 +822,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -828,7 +843,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -849,7 +864,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -870,7 +885,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. 
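The cyclic pins described here work together; as a hedged sketch (placeholder path for a cyclic model's result file), a full expansion of the first three sectors might be requested like this:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    ds = dpf.DataSources("path/to/cyclic_file.rst")  # placeholder path

    op = ops.result.contact_sliding_distance()
    op.inputs.data_sources.connect(ds)
    op.inputs.read_cyclic.connect(2)                                 # 2: perform cyclic expansion
    op.inputs.sectors_to_expand.connect(dpf.Scoping(ids=[0, 1, 2]))  # sector numbering starts at 0
    op.inputs.phi.connect(0.0)                                       # angle phi in degrees

    fc = op.outputs.fields_container()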
@@ -891,7 +906,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -912,7 +927,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -933,7 +948,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -954,7 +969,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -975,7 +990,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -996,7 +1011,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -1031,13 +1046,13 @@ class OutputsContactSlidingDistance(_Outputs): def __init__(self, op: Operator): super().__init__(contact_sliding_distance._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( contact_sliding_distance._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/contact_status.py b/src/ansys/dpf/core/operators/result/contact_status.py index 3bba0c902c0..8a89d8cecb5 100644 --- a/src/ansys/dpf/core/operators/result/contact_status.py +++ b/src/ansys/dpf/core/operators/result/contact_status.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class contact_status(Operator): r"""Read/compute element contact status by calling the readers defined by @@ -619,49 +630,73 @@ class InputsContactStatus(_Inputs): def __init__(self, op: Operator): super().__init__(contact_status._spec().inputs, op) - self._time_scoping = Input(contact_status._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + contact_status._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_status._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + contact_status._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(contact_status._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + contact_status._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(contact_status._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + contact_status._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_status._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + contact_status._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( contact_status._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_status._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + contact_status._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(contact_status._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + 
contact_status._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_status._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + contact_status._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( contact_status._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( contact_status._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(contact_status._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + contact_status._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) - self._read_beams = Input(contact_status._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + contact_status._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(contact_status._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + contact_status._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(contact_status._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + contact_status._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( contact_status._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -682,7 +717,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -703,7 +738,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Fields container already allocated modified inplace @@ -724,7 +759,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -745,7 +780,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -766,7 +801,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -787,7 +822,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -808,7 +843,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -829,7 +864,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -850,7 +885,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -871,7 +906,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -892,7 +927,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -913,7 +948,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -934,7 +969,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -955,7 +990,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -976,7 +1011,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -1011,11 +1046,13 @@ class OutputsContactStatus(_Outputs): def __init__(self, op: Operator): super().__init__(contact_status._spec().outputs, op) - self._fields_container = Output(contact_status._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + contact_status._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/contact_surface_heat_flux.py b/src/ansys/dpf/core/operators/result/contact_surface_heat_flux.py index 3fc27616a20..5246f645b3b 100644 --- a/src/ansys/dpf/core/operators/result/contact_surface_heat_flux.py +++ b/src/ansys/dpf/core/operators/result/contact_surface_heat_flux.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class contact_surface_heat_flux(Operator): r"""Read/compute element total heat flux at contact surface by calling the @@ -619,69 +630,73 @@ class InputsContactSurfaceHeatFlux(_Inputs): def __init__(self, op: Operator): super().__init__(contact_surface_heat_flux._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( contact_surface_heat_flux._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( 
contact_surface_heat_flux._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( contact_surface_heat_flux._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( contact_surface_heat_flux._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( contact_surface_heat_flux._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( contact_surface_heat_flux._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_surface_heat_flux._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + contact_surface_heat_flux._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( contact_surface_heat_flux._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( contact_surface_heat_flux._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( contact_surface_heat_flux._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( contact_surface_heat_flux._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(contact_surface_heat_flux._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + contact_surface_heat_flux._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( contact_surface_heat_flux._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( contact_surface_heat_flux._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( contact_surface_heat_flux._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( contact_surface_heat_flux._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. 
To get all data for all time/freq sets, connect an int with value -1. @@ -702,7 +717,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -723,7 +738,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -744,7 +759,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -765,7 +780,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -786,7 +801,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -807,7 +822,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -828,7 +843,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -849,7 +864,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -870,7 +885,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. 
@@ -891,7 +906,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -912,7 +927,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -933,7 +948,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -954,7 +969,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -975,7 +990,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -996,7 +1011,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -1031,13 +1046,13 @@ class OutputsContactSurfaceHeatFlux(_Outputs): def __init__(self, op: Operator): super().__init__(contact_surface_heat_flux._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( contact_surface_heat_flux._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/contact_total_stress.py b/src/ansys/dpf/core/operators/result/contact_total_stress.py index 132e2bf6ee6..ca06f042786 100644 --- a/src/ansys/dpf/core/operators/result/contact_total_stress.py +++ b/src/ansys/dpf/core/operators/result/contact_total_stress.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class contact_total_stress(Operator): r"""Read/compute element contact total stress (pressure plus friction) by @@ -619,61 +630,73 @@ class InputsContactTotalStress(_Inputs): def __init__(self, op: Operator): super().__init__(contact_total_stress._spec().inputs, op) - self._time_scoping = Input(contact_total_stress._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + contact_total_stress._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_total_stress._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + contact_total_stress._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( contact_total_stress._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( contact_total_stress._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_total_stress._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + contact_total_stress._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( contact_total_stress._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_total_stress._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + contact_total_stress._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( contact_total_stress._spec().input_pin(9), 9, op, -1 ) 
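As a brief illustration of the location and shell pins declared here (placeholder result file; values follow the pin documentation), one might request ElementalNodal results on the top shell layer, split by element shape:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    ds = dpf.DataSources("path/to/file.rst")  # placeholder path

    op = ops.result.contact_total_stress()
    op.inputs.data_sources.connect(ds)
    op.inputs.requested_location.connect("ElementalNodal")  # "Nodal", "Elemental" or "ElementalNodal"
    op.inputs.bool_rotate_to_global.connect(True)           # rotate results to the global coordinate system
    op.inputs.split_shells.connect(True)                    # adds an 'elshape' label to the output
    op.inputs.shell_layer.connect(0)                        # 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid

    fc = op.outputs.fields_container()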
self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( contact_total_stress._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( contact_total_stress._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( contact_total_stress._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(contact_total_stress._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + contact_total_stress._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) - self._read_beams = Input(contact_total_stress._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + contact_total_stress._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( contact_total_stress._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( contact_total_stress._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( contact_total_stress._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -694,7 +717,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -715,7 +738,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -736,7 +759,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -757,7 +780,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -778,7 +801,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -799,7 +822,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -820,7 +843,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -841,7 +864,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -862,7 +885,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -883,7 +906,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -904,7 +927,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -925,7 +948,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -946,7 +969,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. 
If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -967,7 +990,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -988,7 +1011,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -1023,13 +1046,13 @@ class OutputsContactTotalStress(_Outputs): def __init__(self, op: Operator): super().__init__(contact_total_stress._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( contact_total_stress._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/coordinate_system.py b/src/ansys/dpf/core/operators/result/coordinate_system.py index 925ea98ae56..7c598edac41 100644 --- a/src/ansys/dpf/core/operators/result/coordinate_system.py +++ b/src/ansys/dpf/core/operators/result/coordinate_system.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.core import errors @@ -15,6 +16,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.streams_container import StreamsContainer + class coordinate_system(Operator): r"""Extracts the Rotation Matrix and Origin of a specific coordinate system. @@ -183,17 +189,21 @@ class InputsCoordinateSystem(_Inputs): def __init__(self, op: Operator): super().__init__(coordinate_system._spec().inputs, op) - self._cs_id = Input(coordinate_system._spec().input_pin(0), 0, op, -1) + self._cs_id: Input[int] = Input( + coordinate_system._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._cs_id) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( coordinate_system._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(coordinate_system._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + coordinate_system._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def cs_id(self) -> Input: + def cs_id(self) -> Input[int]: r"""Allows to connect cs_id input to the operator. Returns @@ -212,7 +222,7 @@ def cs_id(self) -> Input: return self._cs_id @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
Returns @@ -231,7 +241,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -264,11 +274,13 @@ class OutputsCoordinateSystem(_Outputs): def __init__(self, op: Operator): super().__init__(coordinate_system._spec().outputs, op) - self._field = Output(coordinate_system._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + coordinate_system._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator the first 9 double are the rotation (3x3 matrix) and the last 3 is the translation vector diff --git a/src/ansys/dpf/core/operators/result/coordinates.py b/src/ansys/dpf/core/operators/result/coordinates.py index feff61b1357..acab492b33e 100644 --- a/src/ansys/dpf/core/operators/result/coordinates.py +++ b/src/ansys/dpf/core/operators/result/coordinates.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class coordinates(Operator): r"""Read/compute Coordinates (LSDyna) by calling the readers defined by the @@ -251,23 +262,37 @@ class InputsCoordinates(_Inputs): def __init__(self, op: Operator): super().__init__(coordinates._spec().inputs, op) - self._time_scoping = Input(coordinates._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + coordinates._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(coordinates._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + coordinates._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(coordinates._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + coordinates._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(coordinates._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + coordinates._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(coordinates._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + coordinates._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(coordinates._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + coordinates._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(coordinates._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | 
MeshesContainer] = Input( + coordinates._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -288,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -309,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -330,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -351,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -372,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -393,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -428,11 +453,13 @@ class OutputsCoordinates(_Outputs): def __init__(self, op: Operator): super().__init__(coordinates._spec().outputs, op) - self._fields_container = Output(coordinates._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + coordinates._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain.py b/src/ansys/dpf/core/operators/result/creep_strain.py index de5a88db046..6af5abbb83d 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain.py +++ b/src/ansys/dpf/core/operators/result/creep_strain.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain(Operator): r"""Read/compute element nodal component creep strains by calling the @@ -555,37 +566,57 @@ class InputsCreepStrain(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain._spec().inputs, op) - self._time_scoping = Input(creep_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + creep_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(creep_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + creep_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(creep_strain._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + creep_strain._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(creep_strain._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + creep_strain._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(creep_strain._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + creep_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(creep_strain._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + 
creep_strain._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_beams = Input(creep_strain._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + creep_strain._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(creep_strain._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + creep_strain._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(creep_strain._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + creep_strain._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( creep_strain._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -606,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -627,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -648,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -669,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -690,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -711,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -732,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -753,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -774,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -795,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -816,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -851,11 +882,13 @@ class OutputsCreepStrain(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain._spec().outputs, op) - self._fields_container = Output(creep_strain._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + creep_strain._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_X.py b/src/ansys/dpf/core/operators/result/creep_strain_X.py index 803d29f0f64..da1b388f460 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_X.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_X(Operator): r"""Read/compute element nodal component creep strains XX normal component @@ -303,31 +314,49 @@ class InputsCreepStrainX(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_X._spec().inputs, op) - self._time_scoping = Input(creep_strain_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + creep_strain_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(creep_strain_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + creep_strain_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(creep_strain_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + creep_strain_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(creep_strain_X._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + creep_strain_X._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(creep_strain_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + creep_strain_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(creep_strain_X._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + 
creep_strain_X._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(creep_strain_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + creep_strain_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(creep_strain_X._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + creep_strain_X._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -348,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -369,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -390,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -411,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -432,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -453,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -474,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -495,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -516,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -551,11 +580,13 @@ class OutputsCreepStrainX(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_X._spec().outputs, op) - self._fields_container = Output(creep_strain_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + creep_strain_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_XY.py b/src/ansys/dpf/core/operators/result/creep_strain_XY.py index baac38241e3..78f43d1c445 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_XY.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_XY.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_XY(Operator): r"""Read/compute element nodal component creep strains XY shear component @@ -303,33 +314,49 @@ class InputsCreepStrainXy(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_XY._spec().inputs, op) - self._time_scoping = Input(creep_strain_XY._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + creep_strain_XY._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(creep_strain_XY._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + creep_strain_XY._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(creep_strain_XY._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + creep_strain_XY._spec().input_pin(2), 2, op, -1 + ) 
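# --- Reviewer sketch, not part of the patch ----------------------------------
# The classes referenced by the new pin annotations are imported under
# `if TYPE_CHECKING:` in each generated module, so they are resolved by static
# analyzers only and add no runtime imports (and no risk of import cycles).
# Self-contained toy showing the same pattern; `ResultReader` and `Path` are
# illustrative choices, not DPF API.
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated by type checkers only; skipped entirely at runtime.
    from pathlib import Path


class ResultReader:
    def __init__(self) -> None:
        # With postponed evaluation (PEP 563) these annotations stay unevaluated
        # at runtime, so `Path` never has to be importable when the module loads.
        self._source: Path | None = None

    def open(self, source: Path) -> None:
        self._source = source
# ------------------------------------------------------------------------------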
self._inputs.append(self._fields_container) - self._streams_container = Input(creep_strain_XY._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + creep_strain_XY._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(creep_strain_XY._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + creep_strain_XY._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_XY._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_XY._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_XY._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( creep_strain_XY._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(creep_strain_XY._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + creep_strain_XY._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(creep_strain_XY._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + creep_strain_XY._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -350,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -371,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -392,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -413,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -434,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -455,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -476,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -497,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -518,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -553,11 +580,13 @@ class OutputsCreepStrainXy(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_XY._spec().outputs, op) - self._fields_container = Output(creep_strain_XY._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + creep_strain_XY._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_XZ.py b/src/ansys/dpf/core/operators/result/creep_strain_XZ.py index 118480a68ca..52b4d422bac 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_XZ.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_XZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_XZ(Operator): r"""Read/compute element nodal component creep strains XZ shear component @@ -303,33 +314,49 @@ class InputsCreepStrainXz(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_XZ._spec().inputs, op) - self._time_scoping = Input(creep_strain_XZ._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + creep_strain_XZ._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(creep_strain_XZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + creep_strain_XZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(creep_strain_XZ._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + creep_strain_XZ._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(creep_strain_XZ._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + creep_strain_XZ._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(creep_strain_XZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + creep_strain_XZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_XZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_XZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_XZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + 
self._requested_location: Input[str] = Input( creep_strain_XZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(creep_strain_XZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + creep_strain_XZ._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(creep_strain_XZ._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + creep_strain_XZ._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -350,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -371,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -392,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -413,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -434,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -455,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -476,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -497,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -518,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -553,11 +580,13 @@ class OutputsCreepStrainXz(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_XZ._spec().outputs, op) - self._fields_container = Output(creep_strain_XZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + creep_strain_XZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_Y.py b/src/ansys/dpf/core/operators/result/creep_strain_Y.py index 584dbbeee51..26564424b99 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_Y.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_Y(Operator): r"""Read/compute element nodal component creep strains YY normal component @@ -303,31 +314,49 @@ class InputsCreepStrainY(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_Y._spec().inputs, op) - self._time_scoping = Input(creep_strain_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + creep_strain_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(creep_strain_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + creep_strain_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(creep_strain_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + creep_strain_Y._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - 
self._streams_container = Input(creep_strain_Y._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + creep_strain_Y._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(creep_strain_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + creep_strain_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(creep_strain_Y._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + creep_strain_Y._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(creep_strain_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + creep_strain_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(creep_strain_Y._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + creep_strain_Y._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -348,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -369,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -390,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -411,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -432,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -453,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -474,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -495,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -516,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -551,11 +580,13 @@ class OutputsCreepStrainY(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_Y._spec().outputs, op) - self._fields_container = Output(creep_strain_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + creep_strain_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_YZ.py b/src/ansys/dpf/core/operators/result/creep_strain_YZ.py index 08aa6326844..4f6ac5505bb 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_YZ.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_YZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_YZ(Operator): r"""Read/compute element nodal component creep strains YZ shear component @@ -303,33 +314,49 @@ class InputsCreepStrainYz(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_YZ._spec().inputs, op) - self._time_scoping = Input(creep_strain_YZ._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + creep_strain_YZ._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(creep_strain_YZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + creep_strain_YZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(creep_strain_YZ._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + creep_strain_YZ._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(creep_strain_YZ._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + creep_strain_YZ._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(creep_strain_YZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + creep_strain_YZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_YZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_YZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_YZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + 
self._requested_location: Input[str] = Input( creep_strain_YZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(creep_strain_YZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + creep_strain_YZ._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(creep_strain_YZ._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + creep_strain_YZ._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -350,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -371,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -392,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -413,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -434,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -455,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -476,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -497,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -518,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -553,11 +580,13 @@ class OutputsCreepStrainYz(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_YZ._spec().outputs, op) - self._fields_container = Output(creep_strain_YZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + creep_strain_YZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_Z.py b/src/ansys/dpf/core/operators/result/creep_strain_Z.py index bff559388a0..8fe81208b1b 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_Z.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_Z(Operator): r"""Read/compute element nodal component creep strains ZZ normal component @@ -303,31 +314,49 @@ class InputsCreepStrainZ(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_Z._spec().inputs, op) - self._time_scoping = Input(creep_strain_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + creep_strain_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(creep_strain_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + creep_strain_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(creep_strain_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + creep_strain_Z._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - 
self._streams_container = Input(creep_strain_Z._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + creep_strain_Z._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(creep_strain_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + creep_strain_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(creep_strain_Z._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + creep_strain_Z._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(creep_strain_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + creep_strain_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(creep_strain_Z._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + creep_strain_Z._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -348,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -369,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -390,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -411,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -432,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -453,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -474,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -495,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -516,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -551,11 +580,13 @@ class OutputsCreepStrainZ(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_Z._spec().outputs, op) - self._fields_container = Output(creep_strain_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + creep_strain_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_energy_density.py b/src/ansys/dpf/core/operators/result/creep_strain_energy_density.py index f8df12d726e..fb96b857981 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_energy_density.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_energy_density.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_energy_density(Operator): r"""Read/compute element nodal creep strain energy density by calling the @@ -555,55 +566,57 @@ class InputsCreepStrainEnergyDensity(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_energy_density._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( creep_strain_energy_density._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( creep_strain_energy_density._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( creep_strain_energy_density._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( creep_strain_energy_density._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( creep_strain_energy_density._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_energy_density._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_energy_density._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_energy_density._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( 
creep_strain_energy_density._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( creep_strain_energy_density._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( creep_strain_energy_density._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( creep_strain_energy_density._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( creep_strain_energy_density._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -624,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -645,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -666,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -687,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -708,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -729,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -750,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -771,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -792,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -813,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -834,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -869,13 +882,13 @@ class OutputsCreepStrainEnergyDensity(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_energy_density._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( creep_strain_energy_density._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_eqv.py b/src/ansys/dpf/core/operators/result/creep_strain_eqv.py index 3667213652c..11b4ef1670c 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_eqv.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_eqv.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_eqv(Operator): r"""Read/compute element nodal equivalent component creep strains by calling @@ -555,41 +566,57 @@ class InputsCreepStrainEqv(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_eqv._spec().inputs, op) - self._time_scoping = Input(creep_strain_eqv._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + creep_strain_eqv._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(creep_strain_eqv._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + creep_strain_eqv._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(creep_strain_eqv._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + creep_strain_eqv._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( creep_strain_eqv._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(creep_strain_eqv._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + creep_strain_eqv._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_eqv._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_eqv._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_eqv._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( creep_strain_eqv._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_beams = Input(creep_strain_eqv._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + creep_strain_eqv._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(creep_strain_eqv._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + creep_strain_eqv._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(creep_strain_eqv._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + creep_strain_eqv._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( creep_strain_eqv._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -610,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -631,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -652,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -673,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -694,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -715,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -736,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -757,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -778,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -799,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -820,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -855,11 +882,13 @@ class OutputsCreepStrainEqv(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_eqv._spec().outputs, op) - self._fields_container = Output(creep_strain_eqv._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + creep_strain_eqv._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_intensity.py b/src/ansys/dpf/core/operators/result/creep_strain_intensity.py index 7892cc0ef38..fd9bf52e68a 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_intensity.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_intensity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_intensity(Operator): r"""Reads/computes element nodal component creep strains, average it on @@ -304,47 +315,49 @@ class InputsCreepStrainIntensity(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_intensity._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( creep_strain_intensity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( creep_strain_intensity._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( creep_strain_intensity._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( creep_strain_intensity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( creep_strain_intensity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_intensity._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_intensity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_intensity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( creep_strain_intensity._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( 
creep_strain_intensity._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( creep_strain_intensity._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -365,7 +378,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -386,7 +399,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -407,7 +420,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -428,7 +441,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -449,7 +462,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -470,7 +483,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -491,7 +504,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
Returns @@ -510,7 +523,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -531,7 +544,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -566,13 +579,13 @@ class OutputsCreepStrainIntensity(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_intensity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( creep_strain_intensity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_max_shear.py b/src/ansys/dpf/core/operators/result/creep_strain_max_shear.py index 053776aef77..8abbb1bd6f8 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_max_shear.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_max_shear.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_max_shear(Operator): r"""Reads/computes element nodal component creep strains, average it on @@ -304,47 +315,49 @@ class InputsCreepStrainMaxShear(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_max_shear._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( creep_strain_max_shear._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( creep_strain_max_shear._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( creep_strain_max_shear._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( creep_strain_max_shear._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( creep_strain_max_shear._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + 
self._bool_rotate_to_global: Input[bool] = Input( creep_strain_max_shear._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_max_shear._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_max_shear._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( creep_strain_max_shear._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( creep_strain_max_shear._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( creep_strain_max_shear._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -365,7 +378,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -386,7 +399,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -407,7 +420,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -428,7 +441,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -449,7 +462,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. 
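For context on how the annotations in these hunks surface downstream: with Input and Output made generic, a type checker or IDE can see the expected pin types when driving one of these operators. The following is a minimal usage sketch, not part of the diff; it assumes an available DPF server, and "model.rst" is a hypothetical result file path used only for illustration.

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

# Hypothetical result file; point this at an actual MAPDL .rst file.
model = dpf.Model("model.rst")

op = ops.result.creep_strain_max_shear()

# data_sources is declared as Input[DataSources], so connecting an unrelated
# type (for example an int) can be flagged statically instead of only
# failing at run time.
op.inputs.data_sources.connect(model.metadata.data_sources)
op.inputs.requested_location.connect("Nodal")  # Input[str]
op.inputs.read_cyclic.connect(1)               # Input[int]

# fields_container is declared as Output[FieldsContainer]; calling it
# evaluates the operator and returns the fields container.
fields = op.outputs.fields_container()
print(fields)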
@@ -470,7 +483,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -491,7 +504,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -510,7 +523,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -531,7 +544,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -566,13 +579,13 @@ class OutputsCreepStrainMaxShear(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_max_shear._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( creep_strain_max_shear._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_principal_1.py b/src/ansys/dpf/core/operators/result/creep_strain_principal_1.py index 32335eccb7e..2e54bf2f70b 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_principal_1.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_principal_1.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_principal_1(Operator): r"""Read/compute element nodal component creep strains 1st principal @@ -308,47 +319,49 @@ class InputsCreepStrainPrincipal1(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_principal_1._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( creep_strain_principal_1._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( creep_strain_principal_1._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: 
Input[FieldsContainer] = Input( creep_strain_principal_1._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( creep_strain_principal_1._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( creep_strain_principal_1._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_principal_1._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_principal_1._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_principal_1._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( creep_strain_principal_1._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( creep_strain_principal_1._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( creep_strain_principal_1._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsCreepStrainPrincipal1(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_principal_1._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( creep_strain_principal_1._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_principal_2.py b/src/ansys/dpf/core/operators/result/creep_strain_principal_2.py index 1266e2c6f48..7b56f75c323 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_principal_2.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_principal_2.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_principal_2(Operator): r"""Read/compute element nodal component creep strains 2nd principal @@ -308,47 +319,49 @@ class InputsCreepStrainPrincipal2(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_principal_2._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( creep_strain_principal_2._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( creep_strain_principal_2._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( creep_strain_principal_2._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( creep_strain_principal_2._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( creep_strain_principal_2._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_principal_2._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_principal_2._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_principal_2._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( creep_strain_principal_2._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - 
self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( creep_strain_principal_2._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( creep_strain_principal_2._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsCreepStrainPrincipal2(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_principal_2._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( creep_strain_principal_2._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/creep_strain_principal_3.py b/src/ansys/dpf/core/operators/result/creep_strain_principal_3.py index 5478aa7ff37..af091188be8 100644 --- a/src/ansys/dpf/core/operators/result/creep_strain_principal_3.py +++ b/src/ansys/dpf/core/operators/result/creep_strain_principal_3.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class creep_strain_principal_3(Operator): r"""Read/compute element nodal component creep strains 3rd principal @@ -308,47 +319,49 @@ class InputsCreepStrainPrincipal3(_Inputs): def __init__(self, op: Operator): super().__init__(creep_strain_principal_3._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( creep_strain_principal_3._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( creep_strain_principal_3._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( creep_strain_principal_3._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( creep_strain_principal_3._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( creep_strain_principal_3._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - 
self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( creep_strain_principal_3._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_principal_3._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + creep_strain_principal_3._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( creep_strain_principal_3._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( creep_strain_principal_3._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( creep_strain_principal_3._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. 
An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsCreepStrainPrincipal3(_Outputs): def __init__(self, op: Operator): super().__init__(creep_strain_principal_3._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( creep_strain_principal_3._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/current_density.py b/src/ansys/dpf/core/operators/result/current_density.py index 42173cb02f9..b0b74ccdaff 100644 --- a/src/ansys/dpf/core/operators/result/current_density.py +++ b/src/ansys/dpf/core/operators/result/current_density.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class current_density(Operator): r"""Read/compute Current Density by calling the readers defined by the @@ -267,25 +278,37 @@ class InputsCurrentDensity(_Inputs): def __init__(self, op: Operator): super().__init__(current_density._spec().inputs, op) - self._time_scoping = Input(current_density._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + current_density._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(current_density._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + current_density._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - 
self._fields_container = Input(current_density._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + current_density._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(current_density._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + current_density._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(current_density._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + current_density._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( current_density._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(current_density._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + current_density._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -306,7 +329,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -327,7 +350,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -348,7 +371,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -369,7 +392,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
         result file path container, used if no streams are set
@@ -390,7 +413,7 @@ def data_sources(self) -> Input:
         return self._data_sources
 
     @property
-    def bool_rotate_to_global(self) -> Input:
+    def bool_rotate_to_global(self) -> Input[bool]:
         r"""Allows to connect bool_rotate_to_global input to the operator.
 
         if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect.
@@ -411,7 +434,7 @@ def bool_rotate_to_global(self) -> Input:
         return self._bool_rotate_to_global
 
     @property
-    def mesh(self) -> Input:
+    def mesh(self) -> Input[MeshedRegion | MeshesContainer]:
         r"""Allows to connect mesh input to the operator.
 
         prevents from reading the mesh in the result files
@@ -446,11 +469,13 @@ class OutputsCurrentDensity(_Outputs):
 
     def __init__(self, op: Operator):
         super().__init__(current_density._spec().outputs, op)
-        self._fields_container = Output(current_density._spec().output_pin(0), 0, op)
+        self._fields_container: Output[FieldsContainer] = Output(
+            current_density._spec().output_pin(0), 0, op
+        )
         self._outputs.append(self._fields_container)
 
     @property
-    def fields_container(self) -> Output:
+    def fields_container(self) -> Output[FieldsContainer]:
         r"""Allows to get fields_container output of the operator
 
         Returns
diff --git a/src/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py b/src/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py
index b01d4460a4c..5a804bad399 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py
@@ -5,6 +5,7 @@
 """
 from __future__ import annotations
+from typing import TYPE_CHECKING
 from warnings import warn
 
 from ansys.dpf.core.dpf_operator import Operator
@@ -14,6 +15,12 @@
 from ansys.dpf.core.config import Config
 from ansys.dpf.core.server_types import AnyServerType
 
+if TYPE_CHECKING:
+    from ansys.dpf.core.cyclic_support import CyclicSupport
+    from ansys.dpf.core.fields_container import FieldsContainer
+    from ansys.dpf.core.scoping import Scoping
+    from ansys.dpf.core.scopings_container import ScopingsContainer
+
 
 class cyclic_analytic_seqv_max(Operator):
     r"""Compute the maximum of the Von Mises equivalent stress that can be
@@ -214,29 +221,29 @@ class InputsCyclicAnalyticSeqvMax(_Inputs):
 
     def __init__(self, op: Operator):
         super().__init__(cyclic_analytic_seqv_max._spec().inputs, op)
-        self._time_scoping = Input(
+        self._time_scoping: Input[Scoping] = Input(
             cyclic_analytic_seqv_max._spec().input_pin(0), 0, op, -1
         )
         self._inputs.append(self._time_scoping)
-        self._mesh_scoping = Input(
+        self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input(
             cyclic_analytic_seqv_max._spec().input_pin(1), 1, op, -1
         )
         self._inputs.append(self._mesh_scoping)
-        self._fields_container = Input(
+        self._fields_container: Input[FieldsContainer] = Input(
             cyclic_analytic_seqv_max._spec().input_pin(2), 2, op, -1
         )
         self._inputs.append(self._fields_container)
-        self._bool_rotate_to_global = Input(
+        self._bool_rotate_to_global: Input[bool] = Input(
             cyclic_analytic_seqv_max._spec().input_pin(5), 5, op, -1
         )
         self._inputs.append(self._bool_rotate_to_global)
-        self._cyclic_support = Input(
+        self._cyclic_support: Input[CyclicSupport] = Input(
             cyclic_analytic_seqv_max._spec().input_pin(16), 16, op, -1
         )
         self._inputs.append(self._cyclic_support)
 
     @property
-    def time_scoping(self) -> Input:
+    def time_scoping(self) -> Input[Scoping]:
         r"""Allows to connect time_scoping input to the operator.
 
         Returns
@@ -255,7 +262,7 @@ def time_scoping(self) -> Input:
         return self._time_scoping
 
     @property
-    def mesh_scoping(self) -> Input:
+    def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]:
         r"""Allows to connect mesh_scoping input to the operator.
 
         Returns
@@ -274,7 +281,7 @@ def mesh_scoping(self) -> Input:
         return self._mesh_scoping
 
     @property
-    def fields_container(self) -> Input:
+    def fields_container(self) -> Input[FieldsContainer]:
         r"""Allows to connect fields_container input to the operator.
 
         field container with the base and duplicate sectors
@@ -295,7 +302,7 @@ def fields_container(self) -> Input:
         return self._fields_container
 
     @property
-    def bool_rotate_to_global(self) -> Input:
+    def bool_rotate_to_global(self) -> Input[bool]:
         r"""Allows to connect bool_rotate_to_global input to the operator.
 
         default is true
@@ -316,7 +323,7 @@ def bool_rotate_to_global(self) -> Input:
         return self._bool_rotate_to_global
 
     @property
-    def cyclic_support(self) -> Input:
+    def cyclic_support(self) -> Input[CyclicSupport]:
         r"""Allows to connect cyclic_support input to the operator.
 
         Returns
@@ -349,13 +356,13 @@ class OutputsCyclicAnalyticSeqvMax(_Outputs):
 
     def __init__(self, op: Operator):
         super().__init__(cyclic_analytic_seqv_max._spec().outputs, op)
-        self._fields_container = Output(
+        self._fields_container: Output[FieldsContainer] = Output(
             cyclic_analytic_seqv_max._spec().output_pin(0), 0, op
         )
         self._outputs.append(self._fields_container)
 
     @property
-    def fields_container(self) -> Output:
+    def fields_container(self) -> Output[FieldsContainer]:
         r"""Allows to get fields_container output of the operator
 
         FieldsContainer filled in
diff --git a/src/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py b/src/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py
index 85217802f4c..af6ecc60779 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py
@@ -5,6 +5,7 @@
 """
 from __future__ import annotations
+from typing import TYPE_CHECKING
 from warnings import warn
 
 from ansys.dpf.core.dpf_operator import Operator
@@ -14,6 +15,12 @@
 from ansys.dpf.core.config import Config
 from ansys.dpf.core.server_types import AnyServerType
 
+if TYPE_CHECKING:
+    from ansys.dpf.core.cyclic_support import CyclicSupport
+    from ansys.dpf.core.fields_container import FieldsContainer
+    from ansys.dpf.core.scoping import Scoping
+    from ansys.dpf.core.scopings_container import ScopingsContainer
+
 
 class cyclic_analytic_usum_max(Operator):
     r"""Compute the maximum of the total deformation that can be expected on 360
@@ -210,29 +217,29 @@ class InputsCyclicAnalyticUsumMax(_Inputs):
 
     def __init__(self, op: Operator):
         super().__init__(cyclic_analytic_usum_max._spec().inputs, op)
-        self._time_scoping = Input(
+        self._time_scoping: Input[Scoping] = Input(
            cyclic_analytic_usum_max._spec().input_pin(0), 0, op, -1
         )
         self._inputs.append(self._time_scoping)
-        self._mesh_scoping = Input(
+        self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input(
            cyclic_analytic_usum_max._spec().input_pin(1), 1, op, -1
         )
         self._inputs.append(self._mesh_scoping)
-        self._fields_container = Input(
+        self._fields_container: Input[FieldsContainer] = Input(
            cyclic_analytic_usum_max._spec().input_pin(2), 2, op, -1
         )
         self._inputs.append(self._fields_container)
-        self._bool_rotate_to_global = Input(
+        self._bool_rotate_to_global: Input[bool] = Input(
cyclic_analytic_usum_max._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._cyclic_support = Input( + self._cyclic_support: Input[CyclicSupport] = Input( cyclic_analytic_usum_max._spec().input_pin(16), 16, op, -1 ) self._inputs.append(self._cyclic_support) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. Returns @@ -251,7 +258,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -270,7 +277,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field container with the base and duplicate sectors @@ -291,7 +298,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. default is true @@ -312,7 +319,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def cyclic_support(self) -> Input: + def cyclic_support(self) -> Input[CyclicSupport]: r"""Allows to connect cyclic_support input to the operator. Returns @@ -345,13 +352,13 @@ class OutputsCyclicAnalyticUsumMax(_Outputs): def __init__(self, op: Operator): super().__init__(cyclic_analytic_usum_max._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( cyclic_analytic_usum_max._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator FieldsContainer filled in diff --git a/src/ansys/dpf/core/operators/result/cyclic_expansion.py b/src/ansys/dpf/core/operators/result/cyclic_expansion.py index 5216d301e3c..cc593109a1b 100644 --- a/src/ansys/dpf/core/operators/result/cyclic_expansion.py +++ b/src/ansys/dpf/core/operators/result/cyclic_expansion.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.cyclic_support import CyclicSupport + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class cyclic_expansion(Operator): r"""Expand cyclic results from a fieldsContainer for given sets, sectors and @@ -303,39 +310,53 @@ class InputsCyclicExpansion(_Inputs): def __init__(self, op: Operator): super().__init__(cyclic_expansion._spec().inputs, op) - self._time_scoping = Input(cyclic_expansion._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping] = Input( + cyclic_expansion._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cyclic_expansion._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + 
cyclic_expansion._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(cyclic_expansion._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + cyclic_expansion._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._harmonic_index = Input(cyclic_expansion._spec().input_pin(3), 3, op, -1) + self._harmonic_index: Input[int] = Input( + cyclic_expansion._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._harmonic_index) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( cyclic_expansion._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._map_size_scoping_out = Input( + self._map_size_scoping_out: Input = Input( cyclic_expansion._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._map_size_scoping_out) - self._normalization_factor = Input( + self._normalization_factor: Input[float] = Input( cyclic_expansion._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._normalization_factor) - self._merge_stages = Input(cyclic_expansion._spec().input_pin(14), 14, op, -1) + self._merge_stages: Input[bool] = Input( + cyclic_expansion._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._merge_stages) - self._cyclic_support = Input(cyclic_expansion._spec().input_pin(16), 16, op, -1) + self._cyclic_support: Input[CyclicSupport] = Input( + cyclic_expansion._spec().input_pin(16), 16, op, -1 + ) self._inputs.append(self._cyclic_support) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( cyclic_expansion._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(cyclic_expansion._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + cyclic_expansion._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. Returns @@ -354,7 +375,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -373,7 +394,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field container with the base and duplicate sectors @@ -394,7 +415,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def harmonic_index(self) -> Input: + def harmonic_index(self) -> Input[int]: r"""Allows to connect harmonic_index input to the operator. Returns @@ -413,7 +434,7 @@ def harmonic_index(self) -> Input: return self._harmonic_index @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. default is true @@ -455,7 +476,7 @@ def map_size_scoping_out(self) -> Input: return self._map_size_scoping_out @property - def normalization_factor(self) -> Input: + def normalization_factor(self) -> Input[float]: r"""Allows to connect normalization_factor input to the operator. 
Returns @@ -474,7 +495,7 @@ def normalization_factor(self) -> Input: return self._normalization_factor @property - def merge_stages(self) -> Input: + def merge_stages(self) -> Input[bool]: r"""Allows to connect merge_stages input to the operator. Returns @@ -493,7 +514,7 @@ def merge_stages(self) -> Input: return self._merge_stages @property - def cyclic_support(self) -> Input: + def cyclic_support(self) -> Input[CyclicSupport]: r"""Allows to connect cyclic_support input to the operator. Returns @@ -512,7 +533,7 @@ def cyclic_support(self) -> Input: return self._cyclic_support @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label. @@ -533,7 +554,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0) @@ -568,11 +589,13 @@ class OutputsCyclicExpansion(_Outputs): def __init__(self, op: Operator): super().__init__(cyclic_expansion._spec().outputs, op) - self._fields_container = Output(cyclic_expansion._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + cyclic_expansion._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator FieldsContainer filled in diff --git a/src/ansys/dpf/core/operators/result/density.py b/src/ansys/dpf/core/operators/result/density.py index f454cc57335..c4ec7b353e2 100644 --- a/src/ansys/dpf/core/operators/result/density.py +++ b/src/ansys/dpf/core/operators/result/density.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class density(Operator): r"""Read Density by calling the readers defined by the datasources. 
@@ -265,25 +276,41 @@ class InputsDensity(_Inputs): def __init__(self, op: Operator): super().__init__(density._spec().inputs, op) - self._time_scoping = Input(density._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + density._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(density._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + density._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(density._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + density._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(density._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + density._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(density._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + density._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(density._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + density._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(density._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + density._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(density._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + density._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsDensity(_Outputs): def __init__(self, op: Operator): super().__init__(density._spec().outputs, op) - self._fields_container = Output(density._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + density._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/displacement.py b/src/ansys/dpf/core/operators/result/displacement.py index 4d0bca2746f..29e5a581e95 100644 --- a/src/ansys/dpf/core/operators/result/displacement.py +++ b/src/ansys/dpf/core/operators/result/displacement.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class displacement(Operator): r"""Read/compute nodal displacements by calling the readers defined by the @@ -315,35 +326,51 @@ class InputsDisplacement(_Inputs): def __init__(self, op: Operator): super().__init__(displacement._spec().inputs, op) - self._time_scoping = Input(displacement._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | 
Field] = Input( + displacement._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(displacement._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + displacement._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(displacement._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + displacement._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(displacement._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + displacement._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(displacement._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + displacement._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( displacement._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(displacement._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + displacement._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(displacement._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + displacement._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( displacement._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input(displacement._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( + displacement._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(displacement._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input(displacement._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -364,7 +391,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. 
The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -385,7 +412,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -406,7 +433,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -427,7 +454,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -448,7 +475,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -469,7 +496,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -490,7 +517,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -511,7 +538,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -532,7 +559,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -553,7 +580,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. 
@@ -588,11 +615,13 @@ class OutputsDisplacement(_Outputs): def __init__(self, op: Operator): super().__init__(displacement._spec().outputs, op) - self._fields_container = Output(displacement._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + displacement._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/displacement_X.py b/src/ansys/dpf/core/operators/result/displacement_X.py index 8014d9566cc..3b4441a81a6 100644 --- a/src/ansys/dpf/core/operators/result/displacement_X.py +++ b/src/ansys/dpf/core/operators/result/displacement_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class displacement_X(Operator): r"""Read/compute nodal displacements X component of the vector (1st @@ -267,27 +278,41 @@ class InputsDisplacementX(_Inputs): def __init__(self, op: Operator): super().__init__(displacement_X._spec().inputs, op) - self._time_scoping = Input(displacement_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + displacement_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(displacement_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + displacement_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(displacement_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + displacement_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(displacement_X._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + displacement_X._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(displacement_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + displacement_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( displacement_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(displacement_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + displacement_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(displacement_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + displacement_X._spec().input_pin(14), 
14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -308,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -329,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -350,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -371,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -392,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -413,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -434,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -469,11 +494,13 @@ class OutputsDisplacementX(_Outputs): def __init__(self, op: Operator): super().__init__(displacement_X._spec().outputs, op) - self._fields_container = Output(displacement_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + displacement_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/displacement_Y.py b/src/ansys/dpf/core/operators/result/displacement_Y.py index 6548f5c5237..33d664e77d7 100644 --- a/src/ansys/dpf/core/operators/result/displacement_Y.py +++ b/src/ansys/dpf/core/operators/result/displacement_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class displacement_Y(Operator): r"""Read/compute nodal displacements Y component of the vector (2nd @@ -267,27 +278,41 @@ class InputsDisplacementY(_Inputs): def __init__(self, op: Operator): super().__init__(displacement_Y._spec().inputs, op) - self._time_scoping = Input(displacement_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + displacement_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(displacement_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + displacement_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(displacement_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + displacement_Y._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(displacement_Y._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + displacement_Y._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(displacement_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + displacement_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( displacement_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(displacement_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + displacement_Y._spec().input_pin(7), 7, op, -1 + ) 
self._inputs.append(self._mesh) - self._read_cyclic = Input(displacement_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + displacement_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -308,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -329,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -350,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -371,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -392,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -413,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -434,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -469,11 +494,13 @@ class OutputsDisplacementY(_Outputs): def __init__(self, op: Operator): super().__init__(displacement_Y._spec().outputs, op) - self._fields_container = Output(displacement_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + displacement_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/displacement_Z.py b/src/ansys/dpf/core/operators/result/displacement_Z.py index 2598ea88994..642fd2d5c4a 100644 --- a/src/ansys/dpf/core/operators/result/displacement_Z.py +++ b/src/ansys/dpf/core/operators/result/displacement_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class displacement_Z(Operator): r"""Read/compute nodal displacements Z component of the vector (3rd @@ -267,27 +278,41 @@ class InputsDisplacementZ(_Inputs): def __init__(self, op: Operator): super().__init__(displacement_Z._spec().inputs, op) - self._time_scoping = Input(displacement_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + displacement_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(displacement_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + displacement_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(displacement_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + displacement_Z._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(displacement_Z._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + displacement_Z._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(displacement_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + displacement_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( displacement_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(displacement_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + displacement_Z._spec().input_pin(7), 7, op, -1 + ) 
self._inputs.append(self._mesh) - self._read_cyclic = Input(displacement_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + displacement_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -308,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -329,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -350,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -371,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -392,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -413,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -434,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -469,11 +494,13 @@ class OutputsDisplacementZ(_Outputs): def __init__(self, op: Operator): super().__init__(displacement_Z._spec().outputs, op) - self._fields_container = Output(displacement_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + displacement_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/div_lighthill_tensor.py b/src/ansys/dpf/core/operators/result/div_lighthill_tensor.py index ca6e99b2a0e..28598c17058 100644 --- a/src/ansys/dpf/core/operators/result/div_lighthill_tensor.py +++ b/src/ansys/dpf/core/operators/result/div_lighthill_tensor.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class div_lighthill_tensor(Operator): r"""Read Divergence of the Lighthill Tensor by calling the readers defined @@ -267,33 +278,41 @@ class InputsDivLighthillTensor(_Inputs): def __init__(self, op: Operator): super().__init__(div_lighthill_tensor._spec().inputs, op) - self._time_scoping = Input(div_lighthill_tensor._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + div_lighthill_tensor._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(div_lighthill_tensor._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + div_lighthill_tensor._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( div_lighthill_tensor._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(div_lighthill_tensor._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + div_lighthill_tensor._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(div_lighthill_tensor._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + div_lighthill_tensor._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( div_lighthill_tensor._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input( + self._qualifiers1: Input[dict] = Input( div_lighthill_tensor._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._qualifiers1) - 
self._qualifiers2 = Input( + self._qualifiers2: Input[dict] = Input( div_lighthill_tensor._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -314,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -335,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -356,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -377,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -398,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -419,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -440,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -475,13 +494,13 @@ class OutputsDivLighthillTensor(_Outputs): def __init__(self, op: Operator): super().__init__(div_lighthill_tensor._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( div_lighthill_tensor._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/dynamic_viscosity.py b/src/ansys/dpf/core/operators/result/dynamic_viscosity.py index af86d8343a8..0a884c5a943 100644 --- a/src/ansys/dpf/core/operators/result/dynamic_viscosity.py +++ b/src/ansys/dpf/core/operators/result/dynamic_viscosity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class dynamic_viscosity(Operator): r"""Read Dynamic Viscosity by calling the readers defined by the @@ -267,33 +278,41 @@ class InputsDynamicViscosity(_Inputs): def __init__(self, op: Operator): super().__init__(dynamic_viscosity._spec().inputs, op) - self._time_scoping = Input(dynamic_viscosity._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + dynamic_viscosity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(dynamic_viscosity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + dynamic_viscosity._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( dynamic_viscosity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(dynamic_viscosity._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + dynamic_viscosity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(dynamic_viscosity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + dynamic_viscosity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( dynamic_viscosity._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input( + self._qualifiers1: Input[dict] = Input( dynamic_viscosity._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input( + self._qualifiers2: Input[dict] = Input( dynamic_viscosity._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._qualifiers2) @property - 
def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -314,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -335,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -356,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -377,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -398,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -419,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -440,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -475,11 +494,13 @@ class OutputsDynamicViscosity(_Outputs): def __init__(self, op: Operator): super().__init__(dynamic_viscosity._spec().outputs, op) - self._fields_container = Output(dynamic_viscosity._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + dynamic_viscosity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain.py b/src/ansys/dpf/core/operators/result/elastic_strain.py index ff8b4c00830..16ba90b2397 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain(Operator): r"""Read/compute element nodal component elastic strains by calling the @@ -619,49 +630,73 @@ class InputsElasticStrain(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain._spec().inputs, op) - self._time_scoping = Input(elastic_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + elastic_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + elastic_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + elastic_strain._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + elastic_strain._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + elastic_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = 
Input(elastic_strain._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + elastic_strain._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + elastic_strain._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( elastic_strain._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( elastic_strain._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(elastic_strain._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + elastic_strain._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) - self._read_beams = Input(elastic_strain._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + elastic_strain._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(elastic_strain._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + elastic_strain._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(elastic_strain._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + elastic_strain._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( elastic_strain._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -682,7 +717,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -703,7 +738,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Fields container already allocated modified inplace @@ -724,7 +759,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -745,7 +780,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -766,7 +801,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -787,7 +822,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -808,7 +843,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -829,7 +864,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -850,7 +885,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -871,7 +906,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -892,7 +927,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -913,7 +948,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -934,7 +969,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -955,7 +990,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -976,7 +1011,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -1011,11 +1046,13 @@ class OutputsElasticStrain(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain._spec().outputs, op) - self._fields_container = Output(elastic_strain._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + elastic_strain._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_X.py b/src/ansys/dpf/core/operators/result/elastic_strain_X.py index 39c254facca..f800eb28230 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_X.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_X(Operator): r"""Read/compute element nodal component elastic strains XX normal component @@ -303,35 +314,49 @@ class InputsElasticStrainX(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_X._spec().inputs, op) - self._time_scoping = Input(elastic_strain_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + elastic_strain_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + 
elastic_strain_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + elastic_strain_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + elastic_strain_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( elastic_strain_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + elastic_strain_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_X._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + elastic_strain_X._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsElasticStrainX(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_X._spec().outputs, op) - self._fields_container = Output(elastic_strain_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + elastic_strain_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_XY.py b/src/ansys/dpf/core/operators/result/elastic_strain_XY.py index 35f9795b9d2..bcf596cde42 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_XY.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_XY.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_XY(Operator): r"""Read/compute element nodal component elastic strains XY shear component @@ -303,37 +314,49 @@ class InputsElasticStrainXy(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_XY._spec().inputs, op) - self._time_scoping = Input(elastic_strain_XY._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + elastic_strain_XY._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_XY._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + elastic_strain_XY._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elastic_strain_XY._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_XY._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_XY._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + elastic_strain_XY._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_XY._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_XY._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_XY._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( 
elastic_strain_XY._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_XY._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + elastic_strain_XY._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_XY._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + elastic_strain_XY._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsElasticStrainXy(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_XY._spec().outputs, op) - self._fields_container = Output(elastic_strain_XY._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + elastic_strain_XY._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_XZ.py b/src/ansys/dpf/core/operators/result/elastic_strain_XZ.py index b5832863a72..1d79c4e5c03 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_XZ.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_XZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_XZ(Operator): r"""Read/compute element nodal component elastic strains XZ shear component @@ -303,37 +314,49 @@ class InputsElasticStrainXz(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_XZ._spec().inputs, op) - self._time_scoping = Input(elastic_strain_XZ._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + elastic_strain_XZ._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_XZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + elastic_strain_XZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elastic_strain_XZ._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - 
self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_XZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_XZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + elastic_strain_XZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_XZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_XZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_XZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( elastic_strain_XZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_XZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + elastic_strain_XZ._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_XZ._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + elastic_strain_XZ._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsElasticStrainXz(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_XZ._spec().outputs, op) - self._fields_container = Output(elastic_strain_XZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + elastic_strain_XZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_Y.py b/src/ansys/dpf/core/operators/result/elastic_strain_Y.py index 778c3aa62f9..166b01e28f1 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_Y.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_Y(Operator): r"""Read/compute element nodal component elastic strains YY normal component @@ -303,35 +314,49 @@ class InputsElasticStrainY(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_Y._spec().inputs, op) - self._time_scoping = Input(elastic_strain_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + elastic_strain_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + elastic_strain_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + elastic_strain_Y._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + elastic_strain_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + 
self._requested_location: Input[str] = Input( elastic_strain_Y._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + elastic_strain_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_Y._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + elastic_strain_Y._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsElasticStrainY(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_Y._spec().outputs, op) - self._fields_container = Output(elastic_strain_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + elastic_strain_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_YZ.py b/src/ansys/dpf/core/operators/result/elastic_strain_YZ.py index 7fa4e884d02..2dfd2b8c40d 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_YZ.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_YZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_YZ(Operator): r"""Read/compute element nodal component elastic strains YZ shear component @@ -303,37 +314,49 @@ class InputsElasticStrainYz(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_YZ._spec().inputs, op) - self._time_scoping = Input(elastic_strain_YZ._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + elastic_strain_YZ._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_YZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + elastic_strain_YZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elastic_strain_YZ._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - 
self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_YZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_YZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + elastic_strain_YZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_YZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_YZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_YZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( elastic_strain_YZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_YZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + elastic_strain_YZ._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_YZ._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + elastic_strain_YZ._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsElasticStrainYz(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_YZ._spec().outputs, op) - self._fields_container = Output(elastic_strain_YZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + elastic_strain_YZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_Z.py b/src/ansys/dpf/core/operators/result/elastic_strain_Z.py index 48846d57dc1..f1a000082a9 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_Z.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_Z(Operator): r"""Read/compute element nodal component elastic strains ZZ normal component @@ -303,35 +314,49 @@ class InputsElasticStrainZ(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_Z._spec().inputs, op) - self._time_scoping = Input(elastic_strain_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + elastic_strain_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + elastic_strain_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + elastic_strain_Z._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + elastic_strain_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + 
self._requested_location: Input[str] = Input( elastic_strain_Z._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + elastic_strain_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_Z._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + elastic_strain_Z._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsElasticStrainZ(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_Z._spec().outputs, op) - self._fields_container = Output(elastic_strain_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + elastic_strain_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_energy_density.py b/src/ansys/dpf/core/operators/result/elastic_strain_energy_density.py index 0aed53cb5b4..e955b87edd8 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_energy_density.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_energy_density.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_energy_density(Operator): r"""Read/compute element nodal elastic strain energy density by calling the @@ -555,57 +566,57 @@ class InputsElasticStrainEnergyDensity(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_energy_density._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( elastic_strain_energy_density._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( elastic_strain_energy_density._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elastic_strain_energy_density._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - 
self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_energy_density._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( elastic_strain_energy_density._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_energy_density._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( elastic_strain_energy_density._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( elastic_strain_energy_density._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( elastic_strain_energy_density._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( elastic_strain_energy_density._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( elastic_strain_energy_density._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( elastic_strain_energy_density._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -626,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -647,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Fields container already allocated modified inplace @@ -668,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -689,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -710,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -731,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -752,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -773,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -794,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -815,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -836,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -871,13 +882,13 @@ class OutputsElasticStrainEnergyDensity(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_energy_density._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elastic_strain_energy_density._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_eqv.py b/src/ansys/dpf/core/operators/result/elastic_strain_eqv.py index 9667defe197..912039fd556 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_eqv.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_eqv.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_eqv(Operator): r"""Read/compute element nodal equivalent elastic strain by calling the @@ -555,43 +566,57 @@ class InputsElasticStrainEqv(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_eqv._spec().inputs, op) - self._time_scoping = Input(elastic_strain_eqv._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + elastic_strain_eqv._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_eqv._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + elastic_strain_eqv._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elastic_strain_eqv._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_eqv._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_eqv._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + elastic_strain_eqv._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_eqv._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_eqv._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_eqv._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( elastic_strain_eqv._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_beams = Input(elastic_strain_eqv._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + elastic_strain_eqv._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(elastic_strain_eqv._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + elastic_strain_eqv._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(elastic_strain_eqv._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + elastic_strain_eqv._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( elastic_strain_eqv._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -612,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -633,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -654,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -675,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -696,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -717,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -738,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -759,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -780,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -801,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -822,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -857,11 +882,13 @@ class OutputsElasticStrainEqv(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_eqv._spec().outputs, op) - self._fields_container = Output(elastic_strain_eqv._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + elastic_strain_eqv._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_intensity.py b/src/ansys/dpf/core/operators/result/elastic_strain_intensity.py index 57cd685554e..55c9917b77b 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_intensity.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_intensity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_intensity(Operator): r"""Reads/computes element nodal component elastic strains, average it on @@ -304,47 +315,49 @@ class InputsElasticStrainIntensity(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_intensity._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( elastic_strain_intensity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( elastic_strain_intensity._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elastic_strain_intensity._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_intensity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( elastic_strain_intensity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_intensity._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_intensity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_intensity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( elastic_strain_intensity._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + 
self._read_cyclic: Input[int] = Input( elastic_strain_intensity._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( elastic_strain_intensity._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -365,7 +378,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -386,7 +399,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -407,7 +420,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -428,7 +441,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -449,7 +462,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -470,7 +483,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -491,7 +504,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
Returns @@ -510,7 +523,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -531,7 +544,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -566,13 +579,13 @@ class OutputsElasticStrainIntensity(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_intensity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elastic_strain_intensity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_max_shear.py b/src/ansys/dpf/core/operators/result/elastic_strain_max_shear.py index bade30cc6f5..2abeb511615 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_max_shear.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_max_shear.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_max_shear(Operator): r"""Reads/computes element nodal component elastic strains, average it on @@ -304,47 +315,49 @@ class InputsElasticStrainMaxShear(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_max_shear._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( elastic_strain_max_shear._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( elastic_strain_max_shear._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elastic_strain_max_shear._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_max_shear._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( elastic_strain_max_shear._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - 
self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_max_shear._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_max_shear._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_max_shear._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( elastic_strain_max_shear._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( elastic_strain_max_shear._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( elastic_strain_max_shear._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -365,7 +378,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -386,7 +399,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -407,7 +420,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -428,7 +441,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -449,7 +462,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. 
An error is raised if connected. @@ -470,7 +483,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -491,7 +504,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -510,7 +523,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -531,7 +544,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -566,13 +579,13 @@ class OutputsElasticStrainMaxShear(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_max_shear._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elastic_strain_max_shear._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_principal_1.py b/src/ansys/dpf/core/operators/result/elastic_strain_principal_1.py index c2d0ad82256..e51f76445c5 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_principal_1.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_principal_1.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_principal_1(Operator): r"""Read/compute element nodal component elastic strains 1st principal @@ -308,47 +319,49 @@ class InputsElasticStrainPrincipal1(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_principal_1._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( elastic_strain_principal_1._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( elastic_strain_principal_1._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - 
self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elastic_strain_principal_1._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_principal_1._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( elastic_strain_principal_1._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_principal_1._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_principal_1._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_principal_1._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( elastic_strain_principal_1._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( elastic_strain_principal_1._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( elastic_strain_principal_1._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
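# Sketch of the MeshedRegion | MeshesContainer union on the mesh pin of
# elastic_strain_principal_1, assuming a dpf.Model built from a placeholder file.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

model = dpf.Model("result_file")  # placeholder path
op = ops.result.elastic_strain_principal_1()
op.inputs.data_sources.connect(model.metadata.data_sources)  # Input[DataSources]
op.inputs.mesh.connect(model.metadata.meshed_region)         # MeshedRegion side of the union
op.inputs.requested_location.connect("Nodal")                # Input[str]
fc = op.outputs.fields_container()                           # Output[FieldsContainer]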
elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsElasticStrainPrincipal1(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_principal_1._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elastic_strain_principal_1._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_principal_2.py b/src/ansys/dpf/core/operators/result/elastic_strain_principal_2.py index ff68d52d7c5..7bb63f6ae65 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_principal_2.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_principal_2.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_principal_2(Operator): r"""Read/compute element nodal component elastic strains 2nd principal @@ -308,47 +319,49 @@ class InputsElasticStrainPrincipal2(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_principal_2._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( elastic_strain_principal_2._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( elastic_strain_principal_2._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elastic_strain_principal_2._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_principal_2._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( elastic_strain_principal_2._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_principal_2._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_principal_2._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_principal_2._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( elastic_strain_principal_2._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( elastic_strain_principal_2._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( elastic_strain_principal_2._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsElasticStrainPrincipal2(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_principal_2._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elastic_strain_principal_2._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_principal_3.py b/src/ansys/dpf/core/operators/result/elastic_strain_principal_3.py index 58b012d825f..158ebed9243 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_principal_3.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_principal_3.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_principal_3(Operator): r"""Read/compute element nodal component elastic strains 3rd principal @@ -308,47 +319,49 @@ class InputsElasticStrainPrincipal3(_Inputs): def __init__(self, op: Operator): super().__init__(elastic_strain_principal_3._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( elastic_strain_principal_3._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( elastic_strain_principal_3._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elastic_strain_principal_3._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_principal_3._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( elastic_strain_principal_3._spec().input_pin(4), 4, op, -1 ) 
self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elastic_strain_principal_3._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_principal_3._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elastic_strain_principal_3._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( elastic_strain_principal_3._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( elastic_strain_principal_3._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( elastic_strain_principal_3._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
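# Sketch of the Scoping | int | float | Field union on time_scoping for
# elastic_strain_principal_3; the result file and set ids below are placeholders.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.elastic_strain_principal_3()
op.inputs.data_sources.connect(dpf.DataSources("result_file"))  # placeholder path
op.inputs.time_scoping.connect(-1)                        # int: all time/freq sets
# op.inputs.time_scoping.connect(0.02)                    # float: a time/freq value
# op.inputs.time_scoping.connect(dpf.Scoping(ids=[1, 2])) # scoping of set ids
fc = op.outputs.fields_container()                        # Output[FieldsContainer]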
result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsElasticStrainPrincipal3(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_principal_3._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elastic_strain_principal_3._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py b/src/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py index eda9fd562a4..3d98b6c670e 100644 --- a/src/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py +++ b/src/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elastic_strain_rotation_by_euler_nodes(Operator): r"""read Euler angles on elements from the result file and rotate the fields @@ -24,7 +30,7 @@ class elastic_strain_rotation_by_euler_nodes(Operator): ------ fields_container: FieldsContainer, optional streams_container: StreamsContainer or Stream or Class - Dataprocessing::Crstfilewrapper, optional + Dataprocessing::Crstfilewrapper, optional data_sources: DataSources Outputs @@ -188,21 +194,21 @@ class InputsElasticStrainRotationByEulerNodes(_Inputs): def __init__(self, op: Operator): 
super().__init__(elastic_strain_rotation_by_euler_nodes._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elastic_strain_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elastic_strain_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( elastic_strain_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -221,7 +227,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -240,7 +246,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -273,13 +279,13 @@ class OutputsElasticStrainRotationByEulerNodes(_Outputs): def __init__(self, op: Operator): super().__init__(elastic_strain_rotation_by_euler_nodes._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elastic_strain_rotation_by_euler_nodes._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/electric_field.py b/src/ansys/dpf/core/operators/result/electric_field.py index 8d4f15bfa8f..08ab24217a1 100644 --- a/src/ansys/dpf/core/operators/result/electric_field.py +++ b/src/ansys/dpf/core/operators/result/electric_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class electric_field(Operator): r"""Read/compute electric field by calling the readers defined by the @@ -555,37 +566,57 @@ class InputsElectricField(_Inputs): def __init__(self, op: Operator): super().__init__(electric_field._spec().inputs, op) - self._time_scoping = Input(electric_field._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + electric_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(electric_field._spec().input_pin(1), 1, op, -1) + 
self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + electric_field._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(electric_field._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + electric_field._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(electric_field._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + electric_field._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(electric_field._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + electric_field._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( electric_field._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(electric_field._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + electric_field._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(electric_field._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + electric_field._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_beams = Input(electric_field._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + electric_field._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(electric_field._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + electric_field._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(electric_field._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + electric_field._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( electric_field._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -606,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. 
The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -627,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -648,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -669,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -690,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -711,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -732,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -753,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -774,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -795,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. 
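# Sketch of the shell-related pins typed above on ops.result.electric_field;
# the result file is a placeholder and the values follow the docstrings in this file.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.electric_field()
op.inputs.data_sources.connect(dpf.DataSources("result_file"))  # placeholder path
op.inputs.split_shells.connect(True)         # Input[bool]
op.inputs.shell_layer.connect(0)             # Input[int]: 0 = Top
op.inputs.extend_to_mid_nodes.connect(True)  # Input[bool]
fields = op.outputs.fields_container()       # Output[FieldsContainer]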
@@ -816,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -851,11 +882,13 @@ class OutputsElectricField(_Outputs): def __init__(self, op: Operator): super().__init__(electric_field._spec().outputs, op) - self._fields_container = Output(electric_field._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + electric_field._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/electric_field_X.py b/src/ansys/dpf/core/operators/result/electric_field_X.py index 8f9d3fcc7f1..97526d51c9a 100644 --- a/src/ansys/dpf/core/operators/result/electric_field_X.py +++ b/src/ansys/dpf/core/operators/result/electric_field_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class electric_field_X(Operator): r"""Read/compute electric field X component of the vector (1st component) by @@ -303,35 +314,49 @@ class InputsElectricFieldX(_Inputs): def __init__(self, op: Operator): super().__init__(electric_field_X._spec().inputs, op) - self._time_scoping = Input(electric_field_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + electric_field_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(electric_field_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + electric_field_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(electric_field_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + electric_field_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( electric_field_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(electric_field_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + electric_field_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( electric_field_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = 
Input(electric_field_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + electric_field_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( electric_field_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(electric_field_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + electric_field_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(electric_field_X._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + electric_field_X._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. 
if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsElectricFieldX(_Outputs): def __init__(self, op: Operator): super().__init__(electric_field_X._spec().outputs, op) - self._fields_container = Output(electric_field_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + electric_field_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/electric_field_Y.py b/src/ansys/dpf/core/operators/result/electric_field_Y.py index 74d61013a5d..8d3f0cadd6d 100644 --- a/src/ansys/dpf/core/operators/result/electric_field_Y.py +++ b/src/ansys/dpf/core/operators/result/electric_field_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class electric_field_Y(Operator): r"""Read/compute electric field Y component of the vector (2nd component) by @@ -303,35 +314,49 @@ class InputsElectricFieldY(_Inputs): def __init__(self, op: Operator): super().__init__(electric_field_Y._spec().inputs, op) - self._time_scoping = Input(electric_field_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + electric_field_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(electric_field_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = 
Input( + electric_field_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(electric_field_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + electric_field_Y._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( electric_field_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(electric_field_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + electric_field_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( electric_field_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(electric_field_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + electric_field_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( electric_field_Y._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(electric_field_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + electric_field_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(electric_field_Y._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + electric_field_Y._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsElectricFieldY(_Outputs): def __init__(self, op: Operator): super().__init__(electric_field_Y._spec().outputs, op) - self._fields_container = Output(electric_field_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + electric_field_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/electric_field_Z.py b/src/ansys/dpf/core/operators/result/electric_field_Z.py index 715eafd6314..1a3f1467a72 100644 --- a/src/ansys/dpf/core/operators/result/electric_field_Z.py +++ b/src/ansys/dpf/core/operators/result/electric_field_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class electric_field_Z(Operator): r"""Read/compute electric field Z component of the vector (3rd component) by @@ -303,35 +314,49 @@ class InputsElectricFieldZ(_Inputs): def __init__(self, op: Operator): super().__init__(electric_field_Z._spec().inputs, op) - self._time_scoping = Input(electric_field_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + electric_field_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(electric_field_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + electric_field_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(electric_field_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + electric_field_Z._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( electric_field_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(electric_field_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + electric_field_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( electric_field_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(electric_field_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + electric_field_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: 
Input[str] = Input( electric_field_Z._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(electric_field_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + electric_field_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(electric_field_Z._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + electric_field_Z._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsElectricFieldZ(_Outputs): def __init__(self, op: Operator): super().__init__(electric_field_Z._spec().outputs, op) - self._fields_container = Output(electric_field_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + electric_field_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/electric_flux_density.py b/src/ansys/dpf/core/operators/result/electric_flux_density.py index 82823b7dddd..919eb47ad8b 100644 --- a/src/ansys/dpf/core/operators/result/electric_flux_density.py +++ b/src/ansys/dpf/core/operators/result/electric_flux_density.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class electric_flux_density(Operator): r"""Read/compute Electric flux density by calling the readers defined by the @@ -555,55 +566,57 @@ class InputsElectricFluxDensity(_Inputs): def __init__(self, op: Operator): super().__init__(electric_flux_density._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( electric_flux_density._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( electric_flux_density._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( electric_flux_density._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: 
Input[StreamsContainer] = Input( electric_flux_density._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( electric_flux_density._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( electric_flux_density._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(electric_flux_density._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + electric_flux_density._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( electric_flux_density._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( electric_flux_density._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( electric_flux_density._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( electric_flux_density._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( electric_flux_density._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -624,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -645,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Fields container already allocated modified inplace @@ -666,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -687,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -708,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -729,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -750,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -771,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -792,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -813,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -834,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -869,13 +882,13 @@ class OutputsElectricFluxDensity(_Outputs): def __init__(self, op: Operator): super().__init__(electric_flux_density._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( electric_flux_density._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/electric_flux_density_X.py b/src/ansys/dpf/core/operators/result/electric_flux_density_X.py index 2ba8dc497c6..1db16f53d0f 100644 --- a/src/ansys/dpf/core/operators/result/electric_flux_density_X.py +++ b/src/ansys/dpf/core/operators/result/electric_flux_density_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class electric_flux_density_X(Operator): r"""Read/compute Electric flux density X component of the vector (1st @@ -303,47 +314,49 @@ class InputsElectricFluxDensityX(_Inputs): def __init__(self, op: Operator): super().__init__(electric_flux_density_X._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( electric_flux_density_X._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( electric_flux_density_X._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( electric_flux_density_X._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( electric_flux_density_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( electric_flux_density_X._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( electric_flux_density_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(electric_flux_density_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + electric_flux_density_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( electric_flux_density_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( 
electric_flux_density_X._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( electric_flux_density_X._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -364,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -385,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -406,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -427,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -448,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -469,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -490,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
requested location, default is Nodal @@ -511,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -532,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -567,13 +580,13 @@ class OutputsElectricFluxDensityX(_Outputs): def __init__(self, op: Operator): super().__init__(electric_flux_density_X._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( electric_flux_density_X._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/electric_flux_density_Y.py b/src/ansys/dpf/core/operators/result/electric_flux_density_Y.py index 3a54ff7c6e3..aea24cd49d7 100644 --- a/src/ansys/dpf/core/operators/result/electric_flux_density_Y.py +++ b/src/ansys/dpf/core/operators/result/electric_flux_density_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class electric_flux_density_Y(Operator): r"""Read/compute Electric flux density Y component of the vector (2nd @@ -303,47 +314,49 @@ class InputsElectricFluxDensityY(_Inputs): def __init__(self, op: Operator): super().__init__(electric_flux_density_Y._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( electric_flux_density_Y._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( electric_flux_density_Y._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( electric_flux_density_Y._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( electric_flux_density_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( electric_flux_density_Y._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) 
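The annotated pins above are what user code reaches through op.inputs.* and op.outputs.*. A minimal usage sketch, not part of this patch, assuming a DPF server is available and a local result file; the "model.rst" path is a placeholder:

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.electric_flux_density_Y()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # Input[DataSources]
op.inputs.requested_location.connect("Nodal")                 # Input[str]
op.inputs.read_cyclic.connect(1)                              # Input[int]: read the cyclic sector
fc = op.outputs.fields_container()                            # evaluates and returns a FieldsContainer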
- self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( electric_flux_density_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(electric_flux_density_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + electric_flux_density_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( electric_flux_density_Y._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( electric_flux_density_Y._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( electric_flux_density_Y._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -364,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -385,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -406,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -427,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -448,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. 
if true the field is rotated to global coordinate system (default true) @@ -469,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -490,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -511,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -532,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -567,13 +580,13 @@ class OutputsElectricFluxDensityY(_Outputs): def __init__(self, op: Operator): super().__init__(electric_flux_density_Y._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( electric_flux_density_Y._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/electric_flux_density_Z.py b/src/ansys/dpf/core/operators/result/electric_flux_density_Z.py index 4768f9c528d..5ed6732fd48 100644 --- a/src/ansys/dpf/core/operators/result/electric_flux_density_Z.py +++ b/src/ansys/dpf/core/operators/result/electric_flux_density_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class electric_flux_density_Z(Operator): r"""Read/compute Electric flux density Z component of the vector (3rd @@ -303,47 +314,49 @@ class InputsElectricFluxDensityZ(_Inputs): def __init__(self, op: Operator): super().__init__(electric_flux_density_Z._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( electric_flux_density_Z._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( electric_flux_density_Z._spec().input_pin(1), 1, op, -1 ) 
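A hedged sketch of the union-typed scoping pins declared just above for electric_flux_density_Z. The file name and node IDs are illustrative placeholders, not taken from this patch:

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.electric_flux_density_Z()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))
op.inputs.time_scoping.connect(-1)  # int accepted by Input[Scoping | int | float | Field]: all time/freq sets
op.inputs.mesh_scoping.connect(dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal))  # Scoping accepted
fc = op.outputs.fields_container()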
self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( electric_flux_density_Z._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( electric_flux_density_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( electric_flux_density_Z._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( electric_flux_density_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(electric_flux_density_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + electric_flux_density_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( electric_flux_density_Z._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( electric_flux_density_Z._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( electric_flux_density_Z._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -364,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -385,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -406,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -427,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -448,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -469,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -490,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -511,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -532,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -567,13 +580,13 @@ class OutputsElectricFluxDensityZ(_Outputs): def __init__(self, op: Operator): super().__init__(electric_flux_density_Z._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( electric_flux_density_Z._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/electric_potential.py b/src/ansys/dpf/core/operators/result/electric_potential.py index 4e7dccd5fbb..93ebee192d0 100644 --- a/src/ansys/dpf/core/operators/result/electric_potential.py +++ b/src/ansys/dpf/core/operators/result/electric_potential.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class electric_potential(Operator): r"""Read/compute electric Potential by calling the readers defined by the @@ -251,29 +262,37 @@ class InputsElectricPotential(_Inputs): def __init__(self, op: Operator): super().__init__(electric_potential._spec().inputs, op) - self._time_scoping = Input(electric_potential._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + electric_potential._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(electric_potential._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + electric_potential._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( electric_potential._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( electric_potential._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(electric_potential._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + electric_potential._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( electric_potential._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(electric_potential._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + electric_potential._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: 
r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -294,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -315,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -336,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -357,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -378,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -399,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -434,11 +453,13 @@ class OutputsElectricPotential(_Outputs): def __init__(self, op: Operator): super().__init__(electric_potential._spec().outputs, op) - self._fields_container = Output(electric_potential._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + electric_potential._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/element_centroids.py b/src/ansys/dpf/core/operators/result/element_centroids.py index e3a614bccb9..acf673e160f 100644 --- a/src/ansys/dpf/core/operators/result/element_centroids.py +++ b/src/ansys/dpf/core/operators/result/element_centroids.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class element_centroids(Operator): r"""Read/compute coordinate of the elemental centroids by calling the @@ -251,29 +262,37 @@ class InputsElementCentroids(_Inputs): def __init__(self, op: Operator): super().__init__(element_centroids._spec().inputs, op) - self._time_scoping = Input(element_centroids._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + element_centroids._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(element_centroids._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + element_centroids._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( element_centroids._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( element_centroids._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(element_centroids._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + element_centroids._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( element_centroids._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(element_centroids._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + element_centroids._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to 
connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -294,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -315,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -336,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -357,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -378,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -399,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -434,11 +453,13 @@ class OutputsElementCentroids(_Outputs): def __init__(self, op: Operator): super().__init__(element_centroids._spec().outputs, op) - self._fields_container = Output(element_centroids._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + element_centroids._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/element_nodal_forces.py b/src/ansys/dpf/core/operators/result/element_nodal_forces.py index 5cb72522bad..02eed3b7df8 100644 --- a/src/ansys/dpf/core/operators/result/element_nodal_forces.py +++ b/src/ansys/dpf/core/operators/result/element_nodal_forces.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class element_nodal_forces(Operator): r"""Read/compute element nodal forces by calling the readers defined by the @@ -635,65 +646,77 @@ class InputsElementNodalForces(_Inputs): def __init__(self, op: Operator): super().__init__(element_nodal_forces._spec().inputs, op) - self._time_scoping = Input(element_nodal_forces._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + element_nodal_forces._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(element_nodal_forces._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + element_nodal_forces._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( element_nodal_forces._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( element_nodal_forces._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(element_nodal_forces._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + element_nodal_forces._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( element_nodal_forces._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(element_nodal_forces._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + element_nodal_forces._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = 
Input( element_nodal_forces._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( element_nodal_forces._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( element_nodal_forces._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( element_nodal_forces._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(element_nodal_forces._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + element_nodal_forces._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) - self._read_beams = Input(element_nodal_forces._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + element_nodal_forces._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( element_nodal_forces._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( element_nodal_forces._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( element_nodal_forces._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) - self._split_force_components = Input( + self._split_force_components: Input[bool] = Input( element_nodal_forces._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._split_force_components) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -714,7 +737,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -735,7 +758,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Fields container already allocated modified inplace @@ -756,7 +779,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -777,7 +800,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -798,7 +821,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -819,7 +842,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -840,7 +863,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -861,7 +884,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -882,7 +905,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -903,7 +926,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -924,7 +947,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -945,7 +968,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -966,7 +989,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -987,7 +1010,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -1008,7 +1031,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -1029,7 +1052,7 @@ def extend_to_mid_nodes(self) -> Input: return self._extend_to_mid_nodes @property - def split_force_components(self) -> Input: + def split_force_components(self) -> Input[bool]: r"""Allows to connect split_force_components input to the operator. If this pin is set to true, the output fields container splits the ENF by degree of freedom ("dof" label, 0 for translation, 1 for rotation, 2 for temperature) and derivative order ("derivative_order" label, 0 for stiffness terms, 1 for damping terms and 2 for inertial terms). Default is false. 
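The pin docstrings above already spell out the concrete values each typed pin expects (an int such as -1 for time_scoping, the Nodal/Elemental/ElementalNodal strings for requested_location, 0-4 for shell_layer, booleans for the read/split flags). As a minimal sketch of wiring these typed pins from user code, assuming a reachable DPF server and a placeholder result-file path:

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

ds = dpf.DataSources("model.rst")  # placeholder path to an MAPDL result file
enf = ops.result.element_nodal_forces()
enf.inputs.data_sources.connect(ds)                                   # Input[DataSources]
enf.inputs.time_scoping.connect(-1)                                   # Input[Scoping | int | float | Field]: -1 = all sets
enf.inputs.requested_location.connect(dpf.locations.elemental_nodal)  # Input[str]
enf.inputs.read_beams.connect(True)                                   # Input[bool]
enf.inputs.shell_layer.connect(0)                                     # Input[int]: 0 = Top
enf.inputs.split_force_components.connect(True)                       # Input[bool]: adds 'dof'/'derivative_order' labels
fields = enf.outputs.fields_container()                               # Output[FieldsContainer] evaluates to a FieldsContainer

Because each pin is parameterized, the same connect call accepts any member of the annotated union (an int, a Scoping, or a Field for time_scoping, for instance) while anything outside the union is rejected during static analysis.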
@@ -1064,13 +1087,13 @@ class OutputsElementNodalForces(_Outputs): def __init__(self, op: Operator): super().__init__(element_nodal_forces._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( element_nodal_forces._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/element_orientations.py b/src/ansys/dpf/core/operators/result/element_orientations.py index b43286a62f4..bbe0eb09aa0 100644 --- a/src/ansys/dpf/core/operators/result/element_orientations.py +++ b/src/ansys/dpf/core/operators/result/element_orientations.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class element_orientations(Operator): r"""Read/compute element euler angles by calling the readers defined by the @@ -555,47 +566,57 @@ class InputsElementOrientations(_Inputs): def __init__(self, op: Operator): super().__init__(element_orientations._spec().inputs, op) - self._time_scoping = Input(element_orientations._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + element_orientations._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(element_orientations._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + element_orientations._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( element_orientations._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( element_orientations._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(element_orientations._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + element_orientations._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( element_orientations._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(element_orientations._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + element_orientations._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( element_orientations._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_beams = Input(element_orientations._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + element_orientations._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( element_orientations._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( element_orientations._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( element_orientations._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -616,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -637,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -658,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -679,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -700,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -721,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -742,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -763,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -784,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -805,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -826,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -861,13 +882,13 @@ class OutputsElementOrientations(_Outputs): def __init__(self, op: Operator): super().__init__(element_orientations._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( element_orientations._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/element_orientations_X.py b/src/ansys/dpf/core/operators/result/element_orientations_X.py index dbacbcfbedc..480ed8bba05 100644 --- a/src/ansys/dpf/core/operators/result/element_orientations_X.py +++ b/src/ansys/dpf/core/operators/result/element_orientations_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class element_orientations_X(Operator): r"""Read/compute element euler angles X component of the vector (1st @@ -303,47 +314,49 @@ class InputsElementOrientationsX(_Inputs): def __init__(self, op: Operator): super().__init__(element_orientations_X._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( element_orientations_X._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( element_orientations_X._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( element_orientations_X._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( element_orientations_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( element_orientations_X._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( element_orientations_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(element_orientations_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + element_orientations_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( element_orientations_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( element_orientations_X._spec().input_pin(14), 
14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( element_orientations_X._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -364,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -385,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -406,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -427,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -448,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -469,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -490,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
requested location, default is Nodal @@ -511,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -532,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -567,13 +580,13 @@ class OutputsElementOrientationsX(_Outputs): def __init__(self, op: Operator): super().__init__(element_orientations_X._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( element_orientations_X._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/element_orientations_Y.py b/src/ansys/dpf/core/operators/result/element_orientations_Y.py index e864dc5e44a..5d350985a4a 100644 --- a/src/ansys/dpf/core/operators/result/element_orientations_Y.py +++ b/src/ansys/dpf/core/operators/result/element_orientations_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class element_orientations_Y(Operator): r"""Read/compute element euler angles Y component of the vector (2nd @@ -303,47 +314,49 @@ class InputsElementOrientationsY(_Inputs): def __init__(self, op: Operator): super().__init__(element_orientations_Y._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( element_orientations_Y._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( element_orientations_Y._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( element_orientations_Y._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( element_orientations_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( element_orientations_Y._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - 
self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( element_orientations_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(element_orientations_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + element_orientations_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( element_orientations_Y._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( element_orientations_Y._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( element_orientations_Y._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -364,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -385,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -406,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -427,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -448,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. 
if true the field is rotated to global coordinate system (default true) @@ -469,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -490,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -511,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -532,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -567,13 +580,13 @@ class OutputsElementOrientationsY(_Outputs): def __init__(self, op: Operator): super().__init__(element_orientations_Y._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( element_orientations_Y._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/element_orientations_Z.py b/src/ansys/dpf/core/operators/result/element_orientations_Z.py index 4268e56e842..b6a334b0a88 100644 --- a/src/ansys/dpf/core/operators/result/element_orientations_Z.py +++ b/src/ansys/dpf/core/operators/result/element_orientations_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class element_orientations_Z(Operator): r"""Read/compute element euler angles Z component of the vector (3rd @@ -303,47 +314,49 @@ class InputsElementOrientationsZ(_Inputs): def __init__(self, op: Operator): super().__init__(element_orientations_Z._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( element_orientations_Z._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( element_orientations_Z._spec().input_pin(1), 1, op, -1 ) 
self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( element_orientations_Z._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( element_orientations_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( element_orientations_Z._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( element_orientations_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(element_orientations_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + element_orientations_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( element_orientations_Z._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( element_orientations_Z._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( element_orientations_Z._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -364,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -385,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -406,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -427,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -448,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -469,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -490,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -511,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -532,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -567,13 +580,13 @@ class OutputsElementOrientationsZ(_Outputs): def __init__(self, op: Operator): super().__init__(element_orientations_Z._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( element_orientations_Z._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elemental_heat_generation.py b/src/ansys/dpf/core/operators/result/elemental_heat_generation.py index e15738e45d1..8642e8622e0 100644 --- a/src/ansys/dpf/core/operators/result/elemental_heat_generation.py +++ b/src/ansys/dpf/core/operators/result/elemental_heat_generation.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elemental_heat_generation(Operator): r"""Read/compute Elemental Heat Generation by calling the readers defined by @@ -251,35 +262,37 @@ class InputsElementalHeatGeneration(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_heat_generation._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( elemental_heat_generation._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( elemental_heat_generation._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( elemental_heat_generation._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elemental_heat_generation._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( elemental_heat_generation._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elemental_heat_generation._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elemental_heat_generation._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elemental_heat_generation._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsElementalHeatGeneration(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_heat_generation._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( elemental_heat_generation._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elemental_mass.py b/src/ansys/dpf/core/operators/result/elemental_mass.py index 7085f109c33..3e6ad6ae39c 100644 --- a/src/ansys/dpf/core/operators/result/elemental_mass.py +++ b/src/ansys/dpf/core/operators/result/elemental_mass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elemental_mass(Operator): r"""Read/compute element mass by calling the readers defined by the @@ -251,25 +262,37 @@ class InputsElementalMass(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_mass._spec().inputs, op) - self._time_scoping = Input(elemental_mass._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + elemental_mass._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elemental_mass._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + elemental_mass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elemental_mass._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + elemental_mass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(elemental_mass._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + elemental_mass._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(elemental_mass._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + elemental_mass._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elemental_mass._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elemental_mass._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elemental_mass._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: 
r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -290,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -311,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -332,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -353,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -374,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -395,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -430,11 +453,13 @@ class OutputsElementalMass(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_mass._spec().outputs, op) - self._fields_container = Output(elemental_mass._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + elemental_mass._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/elemental_volume.py b/src/ansys/dpf/core/operators/result/elemental_volume.py index 2f8bbd4a26b..50e243b178f 100644 --- a/src/ansys/dpf/core/operators/result/elemental_volume.py +++ b/src/ansys/dpf/core/operators/result/elemental_volume.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class elemental_volume(Operator): r"""Read/compute element volume by calling the readers defined by the @@ -315,39 +326,53 @@ class InputsElementalVolume(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_volume._spec().inputs, op) - self._time_scoping = Input(elemental_volume._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + elemental_volume._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elemental_volume._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + elemental_volume._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elemental_volume._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + elemental_volume._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( elemental_volume._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(elemental_volume._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + elemental_volume._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( elemental_volume._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elemental_volume._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + elemental_volume._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(elemental_volume._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] 
= Input( + elemental_volume._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( elemental_volume._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( elemental_volume._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(elemental_volume._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + elemental_volume._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -368,7 +393,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -389,7 +414,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -410,7 +435,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -431,7 +456,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -452,7 +477,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. 
@@ -473,7 +498,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -494,7 +519,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -515,7 +540,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -536,7 +561,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -557,7 +582,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -592,11 +617,13 @@ class OutputsElementalVolume(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_volume._spec().outputs, op) - self._fields_container = Output(elemental_volume._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + elemental_volume._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py b/src/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py index 0ccebfda8b0..f0ce1b74909 100644 --- a/src/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py +++ b/src/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class enf_rotation_by_euler_nodes(Operator): r"""read Euler angles on elements from the result file and rotate the fields @@ -24,7 +30,7 @@ class enf_rotation_by_euler_nodes(Operator): ------ fields_container: FieldsContainer, optional streams_container: StreamsContainer or Stream or Class - Dataprocessing::Crstfilewrapper, optional + Dataprocessing::Crstfilewrapper, optional data_sources: DataSources Outputs @@ -186,21 +192,21 @@ class InputsEnfRotationByEulerNodes(_Inputs): 
def __init__(self, op: Operator): super().__init__(enf_rotation_by_euler_nodes._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( enf_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( enf_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( enf_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -219,7 +225,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -238,7 +244,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -271,13 +277,13 @@ class OutputsEnfRotationByEulerNodes(_Outputs): def __init__(self, op: Operator): super().__init__(enf_rotation_by_euler_nodes._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( enf_rotation_by_euler_nodes._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/enthalpy.py b/src/ansys/dpf/core/operators/result/enthalpy.py index 3799a341d9d..dc4708c234d 100644 --- a/src/ansys/dpf/core/operators/result/enthalpy.py +++ b/src/ansys/dpf/core/operators/result/enthalpy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class enthalpy(Operator): r"""Read Enthalpy by calling the readers defined by the datasources. 
@@ -265,25 +276,41 @@ class InputsEnthalpy(_Inputs): def __init__(self, op: Operator): super().__init__(enthalpy._spec().inputs, op) - self._time_scoping = Input(enthalpy._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + enthalpy._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(enthalpy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + enthalpy._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(enthalpy._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + enthalpy._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(enthalpy._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + enthalpy._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(enthalpy._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + enthalpy._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(enthalpy._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + enthalpy._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(enthalpy._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + enthalpy._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(enthalpy._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + enthalpy._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsEnthalpy(_Outputs): def __init__(self, op: Operator): super().__init__(enthalpy._spec().outputs, op) - self._fields_container = Output(enthalpy._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + enthalpy._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/entropy.py b/src/ansys/dpf/core/operators/result/entropy.py index 75cf0ef848b..ef5c52171f6 100644 --- a/src/ansys/dpf/core/operators/result/entropy.py +++ b/src/ansys/dpf/core/operators/result/entropy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class entropy(Operator): r"""Read Entropy by calling the readers defined by the datasources. 
@@ -265,25 +276,41 @@ class InputsEntropy(_Inputs): def __init__(self, op: Operator): super().__init__(entropy._spec().inputs, op) - self._time_scoping = Input(entropy._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + entropy._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(entropy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + entropy._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(entropy._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + entropy._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(entropy._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + entropy._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(entropy._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + entropy._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(entropy._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + entropy._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(entropy._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + entropy._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(entropy._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + entropy._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsEntropy(_Outputs): def __init__(self, op: Operator): super().__init__(entropy._spec().outputs, op) - self._fields_container = Output(entropy._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + entropy._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/epsilon.py b/src/ansys/dpf/core/operators/result/epsilon.py index ec37d0d4d61..bf74a3246ab 100644 --- a/src/ansys/dpf/core/operators/result/epsilon.py +++ b/src/ansys/dpf/core/operators/result/epsilon.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class epsilon(Operator): r"""Read Turbulent Dissipation Rate (epsilon) by calling the readers defined @@ -267,25 +278,41 @@ class InputsEpsilon(_Inputs): def __init__(self, op: Operator): super().__init__(epsilon._spec().inputs, op) - self._time_scoping = Input(epsilon._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + 
epsilon._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(epsilon._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + epsilon._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(epsilon._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + epsilon._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(epsilon._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + epsilon._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(epsilon._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + epsilon._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(epsilon._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + epsilon._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(epsilon._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + epsilon._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(epsilon._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + epsilon._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -306,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -327,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -348,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
result file path container, used if no streams are set @@ -369,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -390,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -411,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -432,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -467,11 +494,13 @@ class OutputsEpsilon(_Outputs): def __init__(self, op: Operator): super().__init__(epsilon._spec().outputs, op) - self._fields_container = Output(epsilon._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + epsilon._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/equivalent_mass.py b/src/ansys/dpf/core/operators/result/equivalent_mass.py index 29bc4ba7d97..4e80c34f76a 100644 --- a/src/ansys/dpf/core/operators/result/equivalent_mass.py +++ b/src/ansys/dpf/core/operators/result/equivalent_mass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class equivalent_mass(Operator): r"""Read/compute equivalent dof mass by calling the readers defined by the @@ -331,39 +342,57 @@ class InputsEquivalentMass(_Inputs): def __init__(self, op: Operator): super().__init__(equivalent_mass._spec().inputs, op) - self._time_scoping = Input(equivalent_mass._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + equivalent_mass._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(equivalent_mass._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + 
equivalent_mass._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(equivalent_mass._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + equivalent_mass._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(equivalent_mass._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + equivalent_mass._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(equivalent_mass._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + equivalent_mass._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( equivalent_mass._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._all_dofs = Input(equivalent_mass._spec().input_pin(6), 6, op, -1) + self._all_dofs: Input[bool] = Input( + equivalent_mass._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._all_dofs) - self._mesh = Input(equivalent_mass._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + equivalent_mass._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(equivalent_mass._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + equivalent_mass._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( equivalent_mass._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( equivalent_mass._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(equivalent_mass._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + equivalent_mass._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -384,7 +413,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. 
Using scopings container allows you to split the result fields container into domains @@ -405,7 +434,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -426,7 +455,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -447,7 +476,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -468,7 +497,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -489,7 +518,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def all_dofs(self) -> Input: + def all_dofs(self) -> Input[bool]: r"""Allows to connect all_dofs input to the operator. default is false. @@ -510,7 +539,7 @@ def all_dofs(self) -> Input: return self._all_dofs @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -531,7 +560,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -552,7 +581,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -573,7 +602,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -594,7 +623,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. 
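Reviewer note (illustrative only, not part of the patch): with the pin annotations above, a static checker such as mypy can validate what gets connected to equivalent_mass. The result-file path below is a placeholder and a running DPF server is assumed.

from ansys.dpf import core as dpf

op = dpf.operators.result.equivalent_mass()
op.inputs.data_sources.connect(dpf.DataSources(r"path/to/file.rst"))  # placeholder path
op.inputs.read_cyclic.connect(2)    # Input[int]: request cyclic expansion
op.inputs.phi.connect(30.0)         # Input[float]: expansion angle in degrees
# op.inputs.phi.connect("30")       # a checker can now flag this against Input[float]
fc = op.outputs.fields_container()  # pin annotated as Output[FieldsContainer]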
@@ -629,11 +658,13 @@ class OutputsEquivalentMass(_Outputs): def __init__(self, op: Operator): super().__init__(equivalent_mass._spec().outputs, op) - self._fields_container = Output(equivalent_mass._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + equivalent_mass._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/equivalent_radiated_power.py b/src/ansys/dpf/core/operators/result/equivalent_radiated_power.py index fa75771f98d..875c4c2eeba 100644 --- a/src/ansys/dpf/core/operators/result/equivalent_radiated_power.py +++ b/src/ansys/dpf/core/operators/result/equivalent_radiated_power.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + class equivalent_radiated_power(Operator): r"""Compute the Equivalent Radiated Power (ERP) @@ -258,35 +265,41 @@ class InputsEquivalentRadiatedPower(_Inputs): def __init__(self, op: Operator): super().__init__(equivalent_radiated_power._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( equivalent_radiated_power._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input(equivalent_radiated_power._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + equivalent_radiated_power._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._time_scoping = Input( + self._time_scoping: Input[int | Scoping] = Input( equivalent_radiated_power._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._time_scoping) - self._mass_density = Input( + self._mass_density: Input[float] = Input( equivalent_radiated_power._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._mass_density) - self._speed_of_sound = Input( + self._speed_of_sound: Input[float] = Input( equivalent_radiated_power._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._speed_of_sound) - self._erp_type = Input( + self._erp_type: Input[int] = Input( equivalent_radiated_power._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._erp_type) - self._boolean = Input(equivalent_radiated_power._spec().input_pin(6), 6, op, -1) + self._boolean: Input[bool] = Input( + equivalent_radiated_power._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._boolean) - self._factor = Input(equivalent_radiated_power._spec().input_pin(7), 7, op, -1) + self._factor: Input[float] = Input( + equivalent_radiated_power._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._factor) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
the input field container expects displacements fields @@ -307,7 +320,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. the mesh region in this pin has to be boundary or skin mesh @@ -328,7 +341,7 @@ def mesh(self) -> Input: return self._mesh @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[int | Scoping]: r"""Allows to connect time_scoping input to the operator. load step number (if it's specified, the ERP is computed only on the substeps of this step) or time scoping @@ -349,7 +362,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mass_density(self) -> Input: + def mass_density(self) -> Input[float]: r"""Allows to connect mass_density input to the operator. mass density (if it's not specified, default value of the air is applied). @@ -370,7 +383,7 @@ def mass_density(self) -> Input: return self._mass_density @property - def speed_of_sound(self) -> Input: + def speed_of_sound(self) -> Input[float]: r"""Allows to connect speed_of_sound input to the operator. speed of sound (if it's not specified, default value of the speed of sound in the air is applied). @@ -391,7 +404,7 @@ def speed_of_sound(self) -> Input: return self._speed_of_sound @property - def erp_type(self) -> Input: + def erp_type(self) -> Input[int]: r"""Allows to connect erp_type input to the operator. if this pin is set to 0, the classical ERP is computed, 1 the corrected ERP is computed (a mesh of one face has to be given in the pin 1) and 2 the enhanced ERP is computed. Default is 0. @@ -412,7 +425,7 @@ def erp_type(self) -> Input: return self._erp_type @property - def boolean(self) -> Input: + def boolean(self) -> Input[bool]: r"""Allows to connect boolean input to the operator. if this pin is set to true, the ERP level in dB is computed @@ -433,7 +446,7 @@ def boolean(self) -> Input: return self._boolean @property - def factor(self) -> Input: + def factor(self) -> Input[float]: r"""Allows to connect factor input to the operator. erp reference value. 
Default is 1E-12 @@ -468,13 +481,13 @@ class OutputsEquivalentRadiatedPower(_Outputs): def __init__(self, op: Operator): super().__init__(equivalent_radiated_power._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( equivalent_radiated_power._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/eqv_stress_parameter.py b/src/ansys/dpf/core/operators/result/eqv_stress_parameter.py index 7b074442152..d8e0fd857ce 100644 --- a/src/ansys/dpf/core/operators/result/eqv_stress_parameter.py +++ b/src/ansys/dpf/core/operators/result/eqv_stress_parameter.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class eqv_stress_parameter(Operator): r"""Read/compute element nodal equivalent stress parameter by calling the @@ -555,47 +566,57 @@ class InputsEqvStressParameter(_Inputs): def __init__(self, op: Operator): super().__init__(eqv_stress_parameter._spec().inputs, op) - self._time_scoping = Input(eqv_stress_parameter._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + eqv_stress_parameter._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(eqv_stress_parameter._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + eqv_stress_parameter._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( eqv_stress_parameter._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( eqv_stress_parameter._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(eqv_stress_parameter._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + eqv_stress_parameter._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( eqv_stress_parameter._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(eqv_stress_parameter._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + eqv_stress_parameter._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( eqv_stress_parameter._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_beams = Input(eqv_stress_parameter._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + eqv_stress_parameter._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( eqv_stress_parameter._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( eqv_stress_parameter._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( eqv_stress_parameter._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -616,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -637,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -658,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -679,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -700,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -721,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -742,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -763,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -784,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -805,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -826,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -861,13 +882,13 @@ class OutputsEqvStressParameter(_Outputs): def __init__(self, op: Operator): super().__init__(eqv_stress_parameter._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( eqv_stress_parameter._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/erp_radiation_efficiency.py b/src/ansys/dpf/core/operators/result/erp_radiation_efficiency.py index 56167a20da5..bbdbfbff2fb 100644 --- a/src/ansys/dpf/core/operators/result/erp_radiation_efficiency.py +++ b/src/ansys/dpf/core/operators/result/erp_radiation_efficiency.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + class erp_radiation_efficiency(Operator): r"""Compute the radiation efficiency (enhanced erp divided by classical erp) @@ -210,27 +217,29 @@ class InputsErpRadiationEfficiency(_Inputs): def __init__(self, op: Operator): super().__init__(erp_radiation_efficiency._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( erp_radiation_efficiency._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input(erp_radiation_efficiency._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + erp_radiation_efficiency._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._time_scoping = Input( + self._time_scoping: Input[int | Scoping] = Input( erp_radiation_efficiency._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._time_scoping) - self._mass_density = Input( + self._mass_density: Input[float] = Input( erp_radiation_efficiency._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._mass_density) - self._speed_of_sound = Input( + self._speed_of_sound: Input[float] = Input( erp_radiation_efficiency._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._speed_of_sound) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. the input field container expects displacements fields @@ -251,7 +260,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. the meshes region in this pin has to be boundary or skin mesh @@ -272,7 +281,7 @@ def mesh(self) -> Input: return self._mesh @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[int | Scoping]: r"""Allows to connect time_scoping input to the operator. 
load step number (if it's specified, the ERP is computed only on the substeps of this step) or time scoping @@ -293,7 +302,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mass_density(self) -> Input: + def mass_density(self) -> Input[float]: r"""Allows to connect mass_density input to the operator. mass density (if it's not specified, default value of the air is applied). @@ -314,7 +323,7 @@ def mass_density(self) -> Input: return self._mass_density @property - def speed_of_sound(self) -> Input: + def speed_of_sound(self) -> Input[float]: r"""Allows to connect speed_of_sound input to the operator. speed of sound (if it's not specified, default value of the speed of sound in the air is applied). @@ -349,13 +358,13 @@ class OutputsErpRadiationEfficiency(_Outputs): def __init__(self, op: Operator): super().__init__(erp_radiation_efficiency._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( erp_radiation_efficiency._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/euler_load_buckling.py b/src/ansys/dpf/core/operators/result/euler_load_buckling.py index f2c7109e702..45f17e39185 100644 --- a/src/ansys/dpf/core/operators/result/euler_load_buckling.py +++ b/src/ansys/dpf/core/operators/result/euler_load_buckling.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + class euler_load_buckling(Operator): r"""Computing Euler’s Critical Load. Formula: Ncr = n\ *E*\ I\ *pi*\ pi @@ -215,25 +220,25 @@ class InputsEulerLoadBuckling(_Inputs): def __init__(self, op: Operator): super().__init__(euler_load_buckling._spec().inputs, op) - self._field_beam_end_condition = Input( + self._field_beam_end_condition: Input[DataSources | Field] = Input( euler_load_buckling._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._field_beam_end_condition) - self._field_beam_moment_inertia = Input( + self._field_beam_moment_inertia: Input[Field] = Input( euler_load_buckling._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._field_beam_moment_inertia) - self._field_beam_young_modulus = Input( + self._field_beam_young_modulus: Input[Field] = Input( euler_load_buckling._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._field_beam_young_modulus) - self._field_beam_length = Input( + self._field_beam_length: Input[Field] = Input( euler_load_buckling._spec().input_pin(8), 8, op, -1 ) self._inputs.append(self._field_beam_length) @property - def field_beam_end_condition(self) -> Input: + def field_beam_end_condition(self) -> Input[DataSources | Field]: r"""Allows to connect field_beam_end_condition input to the operator. This pin contains file csv or field of beam's end condition added by the user. If there's no file added, it would take value of all beam's end condition as 1. 
@@ -254,7 +259,7 @@ def field_beam_end_condition(self) -> Input: return self._field_beam_end_condition @property - def field_beam_moment_inertia(self) -> Input: + def field_beam_moment_inertia(self) -> Input[Field]: r"""Allows to connect field_beam_moment_inertia input to the operator. Field of beam's moment inertia @@ -275,7 +280,7 @@ def field_beam_moment_inertia(self) -> Input: return self._field_beam_moment_inertia @property - def field_beam_young_modulus(self) -> Input: + def field_beam_young_modulus(self) -> Input[Field]: r"""Allows to connect field_beam_young_modulus input to the operator. Field of beam's young modulus @@ -296,7 +301,7 @@ def field_beam_young_modulus(self) -> Input: return self._field_beam_young_modulus @property - def field_beam_length(self) -> Input: + def field_beam_length(self) -> Input[Field]: r"""Allows to connect field_beam_length input to the operator. Field of beam's length @@ -333,21 +338,21 @@ class OutputsEulerLoadBuckling(_Outputs): def __init__(self, op: Operator): super().__init__(euler_load_buckling._spec().outputs, op) - self._field_euler_critical_load = Output( + self._field_euler_critical_load: Output[Field] = Output( euler_load_buckling._spec().output_pin(0), 0, op ) self._outputs.append(self._field_euler_critical_load) - self._field_euler_critical_load_yy = Output( + self._field_euler_critical_load_yy: Output[Field] = Output( euler_load_buckling._spec().output_pin(1), 1, op ) self._outputs.append(self._field_euler_critical_load_yy) - self._field_euler_critical_load_zz = Output( + self._field_euler_critical_load_zz: Output[Field] = Output( euler_load_buckling._spec().output_pin(2), 2, op ) self._outputs.append(self._field_euler_critical_load_zz) @property - def field_euler_critical_load(self) -> Output: + def field_euler_critical_load(self) -> Output[Field]: r"""Allows to get field_euler_critical_load output of the operator This field contains Euler's Critical Load about the principle axis of the cross section having the least moment of inertia. @@ -367,7 +372,7 @@ def field_euler_critical_load(self) -> Output: return self._field_euler_critical_load @property - def field_euler_critical_load_yy(self) -> Output: + def field_euler_critical_load_yy(self) -> Output[Field]: r"""Allows to get field_euler_critical_load_yy output of the operator This field contains Euler's Critical Load on axis y. @@ -387,7 +392,7 @@ def field_euler_critical_load_yy(self) -> Output: return self._field_euler_critical_load_yy @property - def field_euler_critical_load_zz(self) -> Output: + def field_euler_critical_load_zz(self) -> Output[Field]: r"""Allows to get field_euler_critical_load_zz output of the operator This field contains Euler's Critical Load on axis z. 
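Reviewer note (illustrative sketch, not part of the patch): every generated module in this change guards its concrete pin types behind "if TYPE_CHECKING:", which together with postponed annotations leaves runtime imports unchanged. A minimal standalone illustration of that pattern, using a hypothetical helper name:

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # resolved by type checkers only; never imported at runtime
    from ansys.dpf.core.field import Field


def describe(field: Field) -> str:
    # with postponed evaluation the annotation stays a string at runtime,
    # so the guarded import above is not needed to execute this module
    return f"{field.location} field with {field.component_count} component(s)"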
diff --git a/src/ansys/dpf/core/operators/result/euler_nodes.py b/src/ansys/dpf/core/operators/result/euler_nodes.py index 6a21c80db78..d3c89a90634 100644 --- a/src/ansys/dpf/core/operators/result/euler_nodes.py +++ b/src/ansys/dpf/core/operators/result/euler_nodes.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.streams_container import StreamsContainer + class euler_nodes(Operator): r"""Reads a field made of 3 coordinates and 3 Euler angles (6 dofs) by node @@ -209,19 +216,29 @@ class InputsEulerNodes(_Inputs): def __init__(self, op: Operator): super().__init__(euler_nodes._spec().inputs, op) - self._streams_container = Input(euler_nodes._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + euler_nodes._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(euler_nodes._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + euler_nodes._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._filter_zeros = Input(euler_nodes._spec().input_pin(5), 5, op, -1) + self._filter_zeros: Input[bool] = Input( + euler_nodes._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._filter_zeros) - self._coord_and_euler = Input(euler_nodes._spec().input_pin(6), 6, op, -1) + self._coord_and_euler: Input[bool] = Input( + euler_nodes._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._coord_and_euler) - self._mesh = Input(euler_nodes._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + euler_nodes._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -240,7 +257,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -259,7 +276,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def filter_zeros(self) -> Input: + def filter_zeros(self) -> Input[bool]: r"""Allows to connect filter_zeros input to the operator. if true, then the field will only contain the scoping if any rotation is not zero. (default is false). @@ -280,7 +297,7 @@ def filter_zeros(self) -> Input: return self._filter_zeros @property - def coord_and_euler(self) -> Input: + def coord_and_euler(self) -> Input[bool]: r"""Allows to connect coord_and_euler input to the operator. if true, then the field has ncomp=6 with 3 coordinates and 3 Euler angles, else there is only the Euler angles (default is true). @@ -301,7 +318,7 @@ def coord_and_euler(self) -> Input: return self._coord_and_euler @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. 
Returns @@ -334,11 +351,13 @@ class OutputsEulerNodes(_Outputs): def __init__(self, op: Operator): super().__init__(euler_nodes._spec().outputs, op) - self._fields_container = Output(euler_nodes._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + euler_nodes._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/fluid_velocity.py b/src/ansys/dpf/core/operators/result/fluid_velocity.py index 77765fd48d5..69334be6fa9 100644 --- a/src/ansys/dpf/core/operators/result/fluid_velocity.py +++ b/src/ansys/dpf/core/operators/result/fluid_velocity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class fluid_velocity(Operator): r"""Read/compute FV by calling the readers defined by the datasources. @@ -249,25 +260,37 @@ class InputsFluidVelocity(_Inputs): def __init__(self, op: Operator): super().__init__(fluid_velocity._spec().inputs, op) - self._time_scoping = Input(fluid_velocity._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + fluid_velocity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(fluid_velocity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + fluid_velocity._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(fluid_velocity._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + fluid_velocity._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(fluid_velocity._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + fluid_velocity._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(fluid_velocity._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + fluid_velocity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( fluid_velocity._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(fluid_velocity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + fluid_velocity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input 
to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -288,7 +311,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -309,7 +332,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -330,7 +353,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -351,7 +374,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -372,7 +395,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -393,7 +416,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -428,11 +451,13 @@ class OutputsFluidVelocity(_Outputs): def __init__(self, op: Operator): super().__init__(fluid_velocity._spec().outputs, op) - self._fields_container = Output(fluid_velocity._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + fluid_velocity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure.py b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure.py index 11ffbe15959..1983cbc2a1a 100644 --- a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure.py +++ b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_inelastic_closure(Operator): r"""Read/compute elemental gasket inelastic closure by calling the readers @@ -539,51 +550,53 @@ class InputsGasketInelasticClosure(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_inelastic_closure._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( gasket_inelastic_closure._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( gasket_inelastic_closure._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_inelastic_closure._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_inelastic_closure._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( gasket_inelastic_closure._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_inelastic_closure._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_inelastic_closure._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_inelastic_closure._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_inelastic_closure._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - 
self._split_shells = Input( + self._split_shells: Input[bool] = Input( gasket_inelastic_closure._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( gasket_inelastic_closure._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( gasket_inelastic_closure._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -604,7 +617,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -625,7 +638,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -646,7 +659,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -667,7 +680,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -688,7 +701,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. 
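Illustrative usage, not part of the generated sources: reader operators such as gasket_inelastic_closure keep their looser pins as explicit unions, for example time_scoping above is Input[Scoping | int | float | Field], so any of the documented ways of selecting time/freq sets now type-checks. A minimal sketch, assuming a running DPF server and a result file of your own (the path below is a placeholder):

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    ds = dpf.DataSources(r"path/to/file.rst")   # placeholder result file path
    op = ops.result.gasket_inelastic_closure()
    op.inputs.data_sources.connect(ds)          # Input[DataSources]
    op.inputs.time_scoping.connect(1)           # Input[Scoping | int | float | Field]:
                                                # an int selects a time/freq set id
    op.inputs.requested_location.connect("Elemental")  # Input[str]

    fc = op.outputs.fields_container()          # Output[FieldsContainer]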
@@ -709,7 +722,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -730,7 +743,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -751,7 +764,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -772,7 +785,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -793,7 +806,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -828,13 +841,13 @@ class OutputsGasketInelasticClosure(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_inelastic_closure._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_inelastic_closure._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_X.py b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_X.py index dcef88a093a..12a36bcf69f 100644 --- a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_X.py +++ b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_inelastic_closure_X(Operator): r"""Read/compute elemental gasket inelastic closure XX normal component (00 @@ -287,43 +298,45 @@ class InputsGasketInelasticClosureX(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_inelastic_closure_X._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( gasket_inelastic_closure_X._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( gasket_inelastic_closure_X._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_inelastic_closure_X._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_inelastic_closure_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( gasket_inelastic_closure_X._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_inelastic_closure_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_inelastic_closure_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_inelastic_closure_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_inelastic_closure_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + 
self._read_cyclic: Input[int] = Input( gasket_inelastic_closure_X._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -344,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -365,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -386,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -407,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -428,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -449,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -470,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -491,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -526,13 +539,13 @@ class OutputsGasketInelasticClosureX(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_inelastic_closure_X._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_inelastic_closure_X._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XY.py b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XY.py index c202c14e19c..4b1f7b60e66 100644 --- a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XY.py +++ b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XY.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_inelastic_closure_XY(Operator): r"""Read/compute elemental gasket inelastic closure XY shear component (01 @@ -287,43 +298,45 @@ class InputsGasketInelasticClosureXy(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_inelastic_closure_XY._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( gasket_inelastic_closure_XY._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( gasket_inelastic_closure_XY._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_inelastic_closure_XY._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_inelastic_closure_XY._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( gasket_inelastic_closure_XY._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_inelastic_closure_XY._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_inelastic_closure_XY._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_inelastic_closure_XY._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + 
self._requested_location: Input[str] = Input( gasket_inelastic_closure_XY._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( gasket_inelastic_closure_XY._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -344,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -365,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -386,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -407,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -428,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -449,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -470,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
requested location, default is Nodal @@ -491,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -526,13 +539,13 @@ class OutputsGasketInelasticClosureXy(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_inelastic_closure_XY._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_inelastic_closure_XY._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XZ.py b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XZ.py index f6d4370d215..7b93076bfbb 100644 --- a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XZ.py +++ b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_inelastic_closure_XZ(Operator): r"""Read/compute elemental gasket inelastic closure XZ shear component (02 @@ -287,43 +298,45 @@ class InputsGasketInelasticClosureXz(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_inelastic_closure_XZ._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( gasket_inelastic_closure_XZ._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( gasket_inelastic_closure_XZ._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_inelastic_closure_XZ._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_inelastic_closure_XZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( gasket_inelastic_closure_XZ._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_inelastic_closure_XZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = 
Input(gasket_inelastic_closure_XZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_inelastic_closure_XZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_inelastic_closure_XZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( gasket_inelastic_closure_XZ._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -344,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -365,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -386,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -407,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -428,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -449,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -470,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -491,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -526,13 +539,13 @@ class OutputsGasketInelasticClosureXz(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_inelastic_closure_XZ._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_inelastic_closure_XZ._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_stress.py b/src/ansys/dpf/core/operators/result/gasket_stress.py index 7e454790a4d..145d69a057d 100644 --- a/src/ansys/dpf/core/operators/result/gasket_stress.py +++ b/src/ansys/dpf/core/operators/result/gasket_stress.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_stress(Operator): r"""Read/compute elemental gasket stress by calling the readers defined by @@ -539,35 +550,53 @@ class InputsGasketStress(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_stress._spec().inputs, op) - self._time_scoping = Input(gasket_stress._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + gasket_stress._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(gasket_stress._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + gasket_stress._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(gasket_stress._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + gasket_stress._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(gasket_stress._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + gasket_stress._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(gasket_stress._spec().input_pin(4), 4, op, -1) + 
self._data_sources: Input[DataSources] = Input( + gasket_stress._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_stress._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_stress._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_stress._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(gasket_stress._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + gasket_stress._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._split_shells = Input(gasket_stress._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + gasket_stress._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(gasket_stress._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + gasket_stress._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( gasket_stress._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -588,7 +617,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -609,7 +638,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -630,7 +659,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -651,7 +680,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -672,7 +701,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -693,7 +722,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -714,7 +743,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -735,7 +764,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -756,7 +785,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -777,7 +806,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -812,11 +841,13 @@ class OutputsGasketStress(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_stress._spec().outputs, op) - self._fields_container = Output(gasket_stress._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + gasket_stress._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_stress_X.py b/src/ansys/dpf/core/operators/result/gasket_stress_X.py index 5ac24d6ef2d..c497231191c 100644 --- a/src/ansys/dpf/core/operators/result/gasket_stress_X.py +++ b/src/ansys/dpf/core/operators/result/gasket_stress_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_stress_X(Operator): r"""Read/compute elemental gasket stress XX normal component (00 component) @@ -287,31 +298,45 @@ class InputsGasketStressX(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_stress_X._spec().inputs, op) - self._time_scoping = Input(gasket_stress_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + gasket_stress_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(gasket_stress_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + gasket_stress_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(gasket_stress_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + gasket_stress_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(gasket_stress_X._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + gasket_stress_X._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(gasket_stress_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + gasket_stress_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_stress_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_stress_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_stress_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( 
gasket_stress_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(gasket_stress_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + gasket_stress_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -332,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -353,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -374,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -395,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -416,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -437,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -458,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
requested location, default is Nodal @@ -479,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -514,11 +539,13 @@ class OutputsGasketStressX(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_stress_X._spec().outputs, op) - self._fields_container = Output(gasket_stress_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + gasket_stress_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_stress_XY.py b/src/ansys/dpf/core/operators/result/gasket_stress_XY.py index ed85b3621c9..bcc492d13e0 100644 --- a/src/ansys/dpf/core/operators/result/gasket_stress_XY.py +++ b/src/ansys/dpf/core/operators/result/gasket_stress_XY.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_stress_XY(Operator): r"""Read/compute elemental gasket stress XY shear component (01 component) @@ -287,33 +298,45 @@ class InputsGasketStressXy(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_stress_XY._spec().inputs, op) - self._time_scoping = Input(gasket_stress_XY._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + gasket_stress_XY._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(gasket_stress_XY._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + gasket_stress_XY._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(gasket_stress_XY._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + gasket_stress_XY._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_stress_XY._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(gasket_stress_XY._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + gasket_stress_XY._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_stress_XY._spec().input_pin(5), 5, op, -1 
) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_stress_XY._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_stress_XY._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_stress_XY._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(gasket_stress_XY._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + gasket_stress_XY._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -334,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -355,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -376,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -397,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -418,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -439,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -460,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -481,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -516,11 +539,13 @@ class OutputsGasketStressXy(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_stress_XY._spec().outputs, op) - self._fields_container = Output(gasket_stress_XY._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + gasket_stress_XY._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_stress_XZ.py b/src/ansys/dpf/core/operators/result/gasket_stress_XZ.py index 31d6b3c9da7..a5d760a08eb 100644 --- a/src/ansys/dpf/core/operators/result/gasket_stress_XZ.py +++ b/src/ansys/dpf/core/operators/result/gasket_stress_XZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_stress_XZ(Operator): r"""Read/compute elemental gasket stress XZ shear component (02 component) @@ -287,33 +298,45 @@ class InputsGasketStressXz(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_stress_XZ._spec().inputs, op) - self._time_scoping = Input(gasket_stress_XZ._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + gasket_stress_XZ._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(gasket_stress_XZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + gasket_stress_XZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(gasket_stress_XZ._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + gasket_stress_XZ._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_stress_XZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(gasket_stress_XZ._spec().input_pin(4), 4, op, -1) + 
self._data_sources: Input[DataSources] = Input( + gasket_stress_XZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_stress_XZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_stress_XZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_stress_XZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_stress_XZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(gasket_stress_XZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + gasket_stress_XZ._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -334,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -355,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -376,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -397,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -418,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. 
if true the field is rotated to global coordinate system (default true) @@ -439,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -460,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -481,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -516,11 +539,13 @@ class OutputsGasketStressXz(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_stress_XZ._spec().outputs, op) - self._fields_container = Output(gasket_stress_XZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + gasket_stress_XZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_thermal_closure.py b/src/ansys/dpf/core/operators/result/gasket_thermal_closure.py index 365fe7e8bc6..612df15b65a 100644 --- a/src/ansys/dpf/core/operators/result/gasket_thermal_closure.py +++ b/src/ansys/dpf/core/operators/result/gasket_thermal_closure.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_thermal_closure(Operator): r"""Read/compute elemental gasket thermal closure by calling the readers @@ -539,51 +550,53 @@ class InputsGasketThermalClosure(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_thermal_closure._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( gasket_thermal_closure._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( gasket_thermal_closure._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_thermal_closure._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: 
Input[StreamsContainer] = Input( gasket_thermal_closure._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( gasket_thermal_closure._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_thermal_closure._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_thermal_closure._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_thermal_closure._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_thermal_closure._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( gasket_thermal_closure._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( gasket_thermal_closure._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( gasket_thermal_closure._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -604,7 +617,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -625,7 +638,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -646,7 +659,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -667,7 +680,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -688,7 +701,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -709,7 +722,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -730,7 +743,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -751,7 +764,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -772,7 +785,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -793,7 +806,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -828,13 +841,13 @@ class OutputsGasketThermalClosure(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_thermal_closure._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_thermal_closure._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_thermal_closure_X.py b/src/ansys/dpf/core/operators/result/gasket_thermal_closure_X.py index 48c2b13d699..19b6324d33f 100644 --- a/src/ansys/dpf/core/operators/result/gasket_thermal_closure_X.py +++ b/src/ansys/dpf/core/operators/result/gasket_thermal_closure_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_thermal_closure_X(Operator): r"""Read/compute elemental gasket thermal closure XX normal component (00 @@ -287,43 +298,45 @@ class InputsGasketThermalClosureX(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_thermal_closure_X._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( gasket_thermal_closure_X._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( gasket_thermal_closure_X._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_thermal_closure_X._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_thermal_closure_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( gasket_thermal_closure_X._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_thermal_closure_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_thermal_closure_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_thermal_closure_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_thermal_closure_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( 
gasket_thermal_closure_X._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -344,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -365,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -386,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -407,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -428,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -449,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -470,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -491,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -526,13 +539,13 @@ class OutputsGasketThermalClosureX(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_thermal_closure_X._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_thermal_closure_X._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_thermal_closure_XY.py b/src/ansys/dpf/core/operators/result/gasket_thermal_closure_XY.py index d59c478063b..aa947ab0750 100644 --- a/src/ansys/dpf/core/operators/result/gasket_thermal_closure_XY.py +++ b/src/ansys/dpf/core/operators/result/gasket_thermal_closure_XY.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_thermal_closure_XY(Operator): r"""Read/compute elemental gasket thermal closure XY shear component (01 @@ -287,43 +298,45 @@ class InputsGasketThermalClosureXy(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_thermal_closure_XY._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( gasket_thermal_closure_XY._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( gasket_thermal_closure_XY._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_thermal_closure_XY._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_thermal_closure_XY._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( gasket_thermal_closure_XY._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_thermal_closure_XY._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_thermal_closure_XY._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_thermal_closure_XY._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( 
gasket_thermal_closure_XY._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( gasket_thermal_closure_XY._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -344,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -365,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -386,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -407,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -428,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -449,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -470,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
requested location, default is Nodal @@ -491,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -526,13 +539,13 @@ class OutputsGasketThermalClosureXy(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_thermal_closure_XY._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_thermal_closure_XY._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_thermal_closure_XZ.py b/src/ansys/dpf/core/operators/result/gasket_thermal_closure_XZ.py index bd1f8316671..4c31bcca3d2 100644 --- a/src/ansys/dpf/core/operators/result/gasket_thermal_closure_XZ.py +++ b/src/ansys/dpf/core/operators/result/gasket_thermal_closure_XZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_thermal_closure_XZ(Operator): r"""Read/compute elemental gasket thermal closure XZ shear component (02 @@ -287,43 +298,45 @@ class InputsGasketThermalClosureXz(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_thermal_closure_XZ._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( gasket_thermal_closure_XZ._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( gasket_thermal_closure_XZ._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_thermal_closure_XZ._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_thermal_closure_XZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( gasket_thermal_closure_XZ._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_thermal_closure_XZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = 
Input(gasket_thermal_closure_XZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_thermal_closure_XZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_thermal_closure_XZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( gasket_thermal_closure_XZ._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -344,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -365,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -386,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -407,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -428,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -449,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
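As a side note for reviewers, the pins annotated above can be exercised as in the minimal sketch below; it assumes a local result file (the path is a placeholder, not part of this change) and the usual `operators.result` entry point. Reusing the model's already-loaded mesh avoids re-reading it from the result files, as the pin description just below notes.

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

model = dpf.Model(r"path/to/file.rst")                       # placeholder result file
op = ops.result.gasket_thermal_closure_XZ()
op.inputs.data_sources.connect(model.metadata.data_sources)  # pin annotated Input[DataSources]
op.inputs.mesh.connect(model.metadata.meshed_region)         # pin annotated Input[MeshedRegion | MeshesContainer]
fields = op.outputs.fields_container()                       # evaluates the Output[FieldsContainer] pin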
prevents from reading the mesh in the result files @@ -470,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -491,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -526,13 +539,13 @@ class OutputsGasketThermalClosureXz(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_thermal_closure_XZ._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_thermal_closure_XZ._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_total_closure.py b/src/ansys/dpf/core/operators/result/gasket_total_closure.py index 3715b1cc719..03de81d42d1 100644 --- a/src/ansys/dpf/core/operators/result/gasket_total_closure.py +++ b/src/ansys/dpf/core/operators/result/gasket_total_closure.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_total_closure(Operator): r"""computes the gasket total closure (sum of gasket thermal closure and @@ -283,37 +294,45 @@ class InputsGasketTotalClosure(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_total_closure._spec().inputs, op) - self._time_scoping = Input(gasket_total_closure._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + gasket_total_closure._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(gasket_total_closure._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + gasket_total_closure._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_total_closure._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_total_closure._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(gasket_total_closure._spec().input_pin(4), 4, op, -1) + self._data_sources: 
Input[DataSources] = Input( + gasket_total_closure._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_total_closure._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_total_closure._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_total_closure._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_total_closure._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( gasket_total_closure._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -334,7 +353,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -355,7 +374,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -376,7 +395,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -397,7 +416,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -418,7 +437,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. 
if true the field is rotated to global coordinate system (default true) @@ -439,7 +458,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -460,7 +479,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -481,7 +500,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -516,13 +535,13 @@ class OutputsGasketTotalClosure(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_total_closure._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_total_closure._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_total_closure_X.py b/src/ansys/dpf/core/operators/result/gasket_total_closure_X.py index 5d1d51a2e1f..fb8be0e7160 100644 --- a/src/ansys/dpf/core/operators/result/gasket_total_closure_X.py +++ b/src/ansys/dpf/core/operators/result/gasket_total_closure_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_total_closure_X(Operator): r"""Read/compute elemental gasket total closure XX normal component (00 @@ -287,43 +298,45 @@ class InputsGasketTotalClosureX(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_total_closure_X._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( gasket_total_closure_X._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( gasket_total_closure_X._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_total_closure_X._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( 
gasket_total_closure_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( gasket_total_closure_X._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_total_closure_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_total_closure_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_total_closure_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_total_closure_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( gasket_total_closure_X._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -344,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -365,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -386,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -407,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
result file path container, used if no streams are set @@ -428,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -449,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -470,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -491,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -526,13 +539,13 @@ class OutputsGasketTotalClosureX(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_total_closure_X._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_total_closure_X._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_total_closure_XY.py b/src/ansys/dpf/core/operators/result/gasket_total_closure_XY.py index 4f07402ffa5..303488baab4 100644 --- a/src/ansys/dpf/core/operators/result/gasket_total_closure_XY.py +++ b/src/ansys/dpf/core/operators/result/gasket_total_closure_XY.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_total_closure_XY(Operator): r"""Read/compute elemental gasket total closure XY shear component (01 @@ -287,43 +298,45 @@ class InputsGasketTotalClosureXy(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_total_closure_XY._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( gasket_total_closure_XY._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( gasket_total_closure_XY._spec().input_pin(1), 1, op, -1 ) 
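For illustration, here is how the typed pins introduced in this constructor read at a call site; a minimal sketch, assuming a placeholder result file and element IDs. Connecting `-1` to `time_scoping` follows the pin description above ("connect an int with value -1" to get all time/freq sets).

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.gasket_total_closure_XY()
op.inputs.data_sources.connect(dpf.DataSources(r"path/to/file.rst"))  # placeholder path
op.inputs.mesh_scoping.connect(
    dpf.Scoping(ids=[10, 11, 12], location=dpf.locations.elemental)   # Input[ScopingsContainer | Scoping]
)
op.inputs.time_scoping.connect(-1)                                    # Input[Scoping | int | float | Field]
fc = op.outputs.fields_container()                                    # evaluates the Output[FieldsContainer] pin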
self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_total_closure_XY._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_total_closure_XY._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( gasket_total_closure_XY._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_total_closure_XY._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_total_closure_XY._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_total_closure_XY._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_total_closure_XY._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( gasket_total_closure_XY._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -344,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -365,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -386,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -407,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -428,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -449,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -470,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -491,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -526,13 +539,13 @@ class OutputsGasketTotalClosureXy(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_total_closure_XY._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_total_closure_XY._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/gasket_total_closure_XZ.py b/src/ansys/dpf/core/operators/result/gasket_total_closure_XZ.py index 550601c6873..4abccaf4879 100644 --- a/src/ansys/dpf/core/operators/result/gasket_total_closure_XZ.py +++ b/src/ansys/dpf/core/operators/result/gasket_total_closure_XZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class gasket_total_closure_XZ(Operator): r"""Read/compute elemental gasket total closure XZ shear component (02 @@ -287,43 +298,45 @@ class InputsGasketTotalClosureXz(_Inputs): def __init__(self, op: Operator): super().__init__(gasket_total_closure_XZ._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: 
Input[Scoping | int | float | Field] = Input( gasket_total_closure_XZ._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( gasket_total_closure_XZ._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( gasket_total_closure_XZ._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( gasket_total_closure_XZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( gasket_total_closure_XZ._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( gasket_total_closure_XZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(gasket_total_closure_XZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + gasket_total_closure_XZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( gasket_total_closure_XZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( gasket_total_closure_XZ._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -344,7 +357,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -365,7 +378,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
FieldsContainer already allocated modified inplace @@ -386,7 +399,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -407,7 +420,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -428,7 +441,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -449,7 +462,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -470,7 +483,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -491,7 +504,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -526,13 +539,13 @@ class OutputsGasketTotalClosureXz(_Outputs): def __init__(self, op: Operator): super().__init__(gasket_total_closure_XZ._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( gasket_total_closure_XZ._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_added_mass.py b/src/ansys/dpf/core/operators/result/global_added_mass.py index 29375c3b274..ef980447fc7 100644 --- a/src/ansys/dpf/core/operators/result/global_added_mass.py +++ b/src/ansys/dpf/core/operators/result/global_added_mass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_added_mass(Operator): r"""Read Global Added Mass (LSDyna) by calling the readers defined by the @@ -184,17 +191,21 @@ class InputsGlobalAddedMass(_Inputs): def __init__(self, op: Operator): super().__init__(global_added_mass._spec().inputs, op) - 
self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_added_mass._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(global_added_mass._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + global_added_mass._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._unit_system = Input(global_added_mass._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + global_added_mass._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -215,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -236,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -271,11 +282,13 @@ class OutputsGlobalAddedMass(_Outputs): def __init__(self, op: Operator): super().__init__(global_added_mass._spec().outputs, op) - self._fields_container = Output(global_added_mass._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + global_added_mass._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_added_mass_pct.py b/src/ansys/dpf/core/operators/result/global_added_mass_pct.py index 09904f3f6f3..d15a690289a 100644 --- a/src/ansys/dpf/core/operators/result/global_added_mass_pct.py +++ b/src/ansys/dpf/core/operators/result/global_added_mass_pct.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_added_mass_pct(Operator): r"""Read Global Added Mass (percentage) (LSDyna) by calling the readers @@ -184,21 +191,21 @@ class InputsGlobalAddedMassPct(_Inputs): def __init__(self, op: Operator): super().__init__(global_added_mass_pct._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_added_mass_pct._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_added_mass_pct._spec().input_pin(4), 4, op, -1 ) 
self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_added_mass_pct._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalAddedMassPct(_Outputs): def __init__(self, op: Operator): super().__init__(global_added_mass_pct._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_added_mass_pct._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_center_mass.py b/src/ansys/dpf/core/operators/result/global_center_mass.py index 4f7c600ed13..fbf7fc014c6 100644 --- a/src/ansys/dpf/core/operators/result/global_center_mass.py +++ b/src/ansys/dpf/core/operators/result/global_center_mass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_center_mass(Operator): r"""Read Global Center of Mass (LSDyna) by calling the readers defined by @@ -184,17 +191,21 @@ class InputsGlobalCenterMass(_Inputs): def __init__(self, op: Operator): super().__init__(global_center_mass._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_center_mass._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(global_center_mass._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + global_center_mass._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._unit_system = Input(global_center_mass._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + global_center_mass._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -215,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -236,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -271,11 +282,13 @@ class OutputsGlobalCenterMass(_Outputs): def __init__(self, op: Operator): super().__init__(global_center_mass._spec().outputs, op) - self._fields_container = Output(global_center_mass._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + global_center_mass._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_energy_ratio.py b/src/ansys/dpf/core/operators/result/global_energy_ratio.py index 66f23267c60..13ff0a02481 100644 --- a/src/ansys/dpf/core/operators/result/global_energy_ratio.py +++ b/src/ansys/dpf/core/operators/result/global_energy_ratio.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_energy_ratio(Operator): r"""Read Global Energy Ratio (LSDyna) by calling the readers defined by the @@ -184,17 +191,21 @@ class InputsGlobalEnergyRatio(_Inputs): def __init__(self, op: Operator): super().__init__(global_energy_ratio._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_energy_ratio._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(global_energy_ratio._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + global_energy_ratio._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._unit_system = Input(global_energy_ratio._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + global_energy_ratio._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -215,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
result file path container, used if no streams are set @@ -236,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -271,13 +282,13 @@ class OutputsGlobalEnergyRatio(_Outputs): def __init__(self, op: Operator): super().__init__(global_energy_ratio._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_energy_ratio._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_energy_ratio_wo_eroded.py b/src/ansys/dpf/core/operators/result/global_energy_ratio_wo_eroded.py index 07af3ddddf9..86e0e8430c7 100644 --- a/src/ansys/dpf/core/operators/result/global_energy_ratio_wo_eroded.py +++ b/src/ansys/dpf/core/operators/result/global_energy_ratio_wo_eroded.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_energy_ratio_wo_eroded(Operator): r"""Read Global Energy ratio without Eroded Energy (LSDyna) by calling the @@ -184,21 +191,21 @@ class InputsGlobalEnergyRatioWoEroded(_Inputs): def __init__(self, op: Operator): super().__init__(global_energy_ratio_wo_eroded._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_energy_ratio_wo_eroded._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_energy_ratio_wo_eroded._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_energy_ratio_wo_eroded._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalEnergyRatioWoEroded(_Outputs): def __init__(self, op: Operator): super().__init__(global_energy_ratio_wo_eroded._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_energy_ratio_wo_eroded._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_eroded_hourglass_energy.py b/src/ansys/dpf/core/operators/result/global_eroded_hourglass_energy.py index dd0d4e7309d..935aadf03a6 100644 --- a/src/ansys/dpf/core/operators/result/global_eroded_hourglass_energy.py +++ b/src/ansys/dpf/core/operators/result/global_eroded_hourglass_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_eroded_hourglass_energy(Operator): r"""Read Global Eroded Hourglass Energy (LSDyna) by calling the readers @@ -184,21 +191,21 @@ class InputsGlobalErodedHourglassEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_eroded_hourglass_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_eroded_hourglass_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_eroded_hourglass_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_eroded_hourglass_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalErodedHourglassEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_eroded_hourglass_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_eroded_hourglass_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_eroded_internal_energy.py b/src/ansys/dpf/core/operators/result/global_eroded_internal_energy.py index 5e124f26118..fc01f6a3e31 100644 --- a/src/ansys/dpf/core/operators/result/global_eroded_internal_energy.py +++ b/src/ansys/dpf/core/operators/result/global_eroded_internal_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_eroded_internal_energy(Operator): r"""Read Global Eroded Internal Energy (LSDyna) by calling the readers @@ -184,21 +191,21 @@ class InputsGlobalErodedInternalEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_eroded_internal_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_eroded_internal_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_eroded_internal_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_eroded_internal_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalErodedInternalEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_eroded_internal_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_eroded_internal_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_eroded_kinetic_energy.py b/src/ansys/dpf/core/operators/result/global_eroded_kinetic_energy.py index 5d80a16c974..8df1c1c101c 100644 --- a/src/ansys/dpf/core/operators/result/global_eroded_kinetic_energy.py +++ b/src/ansys/dpf/core/operators/result/global_eroded_kinetic_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_eroded_kinetic_energy(Operator): r"""Read Global Eroded Kinetic Energy (LSDyna) by calling the readers @@ -184,21 +191,21 @@ class InputsGlobalErodedKineticEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_eroded_kinetic_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_eroded_kinetic_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_eroded_kinetic_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_eroded_kinetic_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalErodedKineticEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_eroded_kinetic_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_eroded_kinetic_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_external_work.py b/src/ansys/dpf/core/operators/result/global_external_work.py index be46c1305f9..38ee721e052 100644 --- a/src/ansys/dpf/core/operators/result/global_external_work.py +++ b/src/ansys/dpf/core/operators/result/global_external_work.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_external_work(Operator): r"""Read Global External Work (LSDyna) by calling the readers defined by the @@ -184,19 +191,21 @@ class InputsGlobalExternalWork(_Inputs): def __init__(self, op: Operator): super().__init__(global_external_work._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_external_work._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(global_external_work._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + global_external_work._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_external_work._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -217,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -238,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -273,13 +282,13 @@ class OutputsGlobalExternalWork(_Outputs): def __init__(self, op: Operator): super().__init__(global_external_work._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_external_work._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_hourglass_energy.py b/src/ansys/dpf/core/operators/result/global_hourglass_energy.py index be2381f5412..d82cf3de71d 100644 --- a/src/ansys/dpf/core/operators/result/global_hourglass_energy.py +++ b/src/ansys/dpf/core/operators/result/global_hourglass_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_hourglass_energy(Operator): r"""Read Global Hourglass Energy (LSDyna) by calling the readers defined by @@ -184,21 +191,21 @@ class InputsGlobalHourglassEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_hourglass_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_hourglass_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_hourglass_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_hourglass_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalHourglassEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_hourglass_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_hourglass_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_internal_energy.py b/src/ansys/dpf/core/operators/result/global_internal_energy.py index 2c7e7e8621a..0e2231ff7ea 100644 --- a/src/ansys/dpf/core/operators/result/global_internal_energy.py +++ b/src/ansys/dpf/core/operators/result/global_internal_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_internal_energy(Operator): r"""Read Global Internal Energy (LSDyna) by calling the readers defined by @@ -184,21 +191,21 @@ class InputsGlobalInternalEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_internal_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_internal_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_internal_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_internal_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalInternalEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_internal_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_internal_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_joint_internal_energy.py b/src/ansys/dpf/core/operators/result/global_joint_internal_energy.py index da6ad154c14..d43f53dbb6b 100644 --- a/src/ansys/dpf/core/operators/result/global_joint_internal_energy.py +++ b/src/ansys/dpf/core/operators/result/global_joint_internal_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_joint_internal_energy(Operator): r"""Read Global Joint Internal Energy (LSDyna) by calling the readers @@ -184,21 +191,21 @@ class InputsGlobalJointInternalEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_joint_internal_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_joint_internal_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_joint_internal_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_joint_internal_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalJointInternalEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_joint_internal_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_joint_internal_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_kinetic_energy.py b/src/ansys/dpf/core/operators/result/global_kinetic_energy.py index e4449c5b1b5..bc423a3f790 100644 --- a/src/ansys/dpf/core/operators/result/global_kinetic_energy.py +++ b/src/ansys/dpf/core/operators/result/global_kinetic_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_kinetic_energy(Operator): r"""Read Global Kinetic Energy (LSDyna) by calling the readers defined by @@ -184,21 +191,21 @@ class InputsGlobalKineticEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_kinetic_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_kinetic_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_kinetic_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_kinetic_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalKineticEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_kinetic_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_kinetic_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_rigid_body_stopper_energy.py b/src/ansys/dpf/core/operators/result/global_rigid_body_stopper_energy.py index 11672f7f92d..31304d2196e 100644 --- a/src/ansys/dpf/core/operators/result/global_rigid_body_stopper_energy.py +++ b/src/ansys/dpf/core/operators/result/global_rigid_body_stopper_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_rigid_body_stopper_energy(Operator): r"""Read Global Rigid Body Stopper Energy (LSDyna) by calling the readers @@ -184,21 +191,21 @@ class InputsGlobalRigidBodyStopperEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_rigid_body_stopper_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_rigid_body_stopper_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_rigid_body_stopper_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_rigid_body_stopper_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
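Illustrative aside, not part of the patch: the same typing pattern applied to the LSDyna global_* readers, sketched here for global_kinetic_energy. The binout path and the unit-system ID are placeholders, and the snippet assumes a standard PyDPF-Core install.

from ansys.dpf import core as dpf

op = dpf.operators.result.global_kinetic_energy()
op.inputs.data_sources.connect(dpf.DataSources("/tmp/binout"))  # Input[DataSources]; placeholder path
# unit_system is now typed Input[int | str | UnitSystem]: an ID, a semicolon-separated
# base-unit string, or a UnitSystem instance all type-check against the pin.
op.inputs.unit_system.connect(6)  # placeholder LSDyna unit-system ID
energy = op.outputs.fields_container()  # Output[FieldsContainer]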
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalRigidBodyStopperEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_rigid_body_stopper_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_rigid_body_stopper_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_sliding_interface_energy.py b/src/ansys/dpf/core/operators/result/global_sliding_interface_energy.py index 6b07858025d..86e02edf739 100644 --- a/src/ansys/dpf/core/operators/result/global_sliding_interface_energy.py +++ b/src/ansys/dpf/core/operators/result/global_sliding_interface_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_sliding_interface_energy(Operator): r"""Read Global Sliding Interface Energy (LSDyna) by calling the readers @@ -184,21 +191,21 @@ class InputsGlobalSlidingInterfaceEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_sliding_interface_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_sliding_interface_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_sliding_interface_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_sliding_interface_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalSlidingInterfaceEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_sliding_interface_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_sliding_interface_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_spring_damper_energy.py b/src/ansys/dpf/core/operators/result/global_spring_damper_energy.py index db60aadfb43..3b857b5efb0 100644 --- a/src/ansys/dpf/core/operators/result/global_spring_damper_energy.py +++ b/src/ansys/dpf/core/operators/result/global_spring_damper_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_spring_damper_energy(Operator): r"""Read Global Spring and Damper Energy (LSDyna) by calling the readers @@ -184,21 +191,21 @@ class InputsGlobalSpringDamperEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_spring_damper_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_spring_damper_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_spring_damper_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_spring_damper_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalSpringDamperEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_spring_damper_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_spring_damper_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_system_damping_energy.py b/src/ansys/dpf/core/operators/result/global_system_damping_energy.py index 0496a65b492..2fd2f64c0fd 100644 --- a/src/ansys/dpf/core/operators/result/global_system_damping_energy.py +++ b/src/ansys/dpf/core/operators/result/global_system_damping_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_system_damping_energy(Operator): r"""Read Global System Damping Energy (LSDyna) by calling the readers @@ -184,21 +191,21 @@ class InputsGlobalSystemDampingEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_system_damping_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_system_damping_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( global_system_damping_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( global_system_damping_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -219,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -240,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -275,13 +282,13 @@ class OutputsGlobalSystemDampingEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_system_damping_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_system_damping_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_time_step.py b/src/ansys/dpf/core/operators/result/global_time_step.py index 4e4bc3c048b..ade84a38802 100644 --- a/src/ansys/dpf/core/operators/result/global_time_step.py +++ b/src/ansys/dpf/core/operators/result/global_time_step.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_time_step(Operator): r"""Read Global Time Step (LSDyna) by calling the readers defined by the @@ -184,17 +191,21 @@ class InputsGlobalTimeStep(_Inputs): def __init__(self, op: Operator): super().__init__(global_time_step._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_time_step._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(global_time_step._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + global_time_step._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._unit_system = Input(global_time_step._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + global_time_step._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -215,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -236,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -271,11 +282,13 @@ class OutputsGlobalTimeStep(_Outputs): def __init__(self, op: Operator): super().__init__(global_time_step._spec().outputs, op) - self._fields_container = Output(global_time_step._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + global_time_step._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_to_nodal.py b/src/ansys/dpf/core/operators/result/global_to_nodal.py index e4302f805d8..0526218a0a8 100644 --- a/src/ansys/dpf/core/operators/result/global_to_nodal.py +++ b/src/ansys/dpf/core/operators/result/global_to_nodal.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class global_to_nodal(Operator): r"""Rotate results from global coordinate system to local coordinate system. @@ -157,13 +161,17 @@ class InputsGlobalToNodal(_Inputs): def __init__(self, op: Operator): super().__init__(global_to_nodal._spec().inputs, op) - self._fieldA = Input(global_to_nodal._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field] = Input( + global_to_nodal._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(global_to_nodal._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field] = Input( + global_to_nodal._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field]: r"""Allows to connect fieldA input to the operator. Vector or tensor field that must be rotated, expressed in global coordinate system. @@ -184,7 +192,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field]: r"""Allows to connect fieldB input to the operator. Nodal euler angles defined from a result file. Those must be the rotations from Nodal to Global. 
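The hunks above all follow the same pattern: pins that previously surfaced as a bare Input now carry a concrete parameter, e.g. Input[StreamsContainer], Input[DataSources] and Input[int | str | UnitSystem] on the LSDyna global result readers, and Output[FieldsContainer] on their single output. A minimal usage sketch with a placeholder result-file path follows; the annotations are purely for static tooling (IDEs, mypy) and do not change runtime behaviour.

from ansys.dpf import core as dpf

# Hypothetical LSDyna result file; substitute a real d3plot/binout path.
ds = dpf.DataSources(r"D:\lsdyna\d3plot")

op = dpf.operators.result.global_time_step()
op.inputs.data_sources.connect(ds)   # pin 4, now typed Input[DataSources]
# Pin 50 is typed Input[int | str | UnitSystem]: a unit-system ID, a
# semicolon-separated base-unit string, or a UnitSystem instance all fit.
# op.inputs.unit_system.connect(...)  # value intentionally left out here
fc = op.outputs.fields_container()   # pin 0, now typed Output[FieldsContainer]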
@@ -219,11 +227,13 @@ class OutputsGlobalToNodal(_Outputs): def __init__(self, op: Operator): super().__init__(global_to_nodal._spec().outputs, op) - self._field = Output(global_to_nodal._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + global_to_nodal._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Rotated field diff --git a/src/ansys/dpf/core/operators/result/global_total_energy.py b/src/ansys/dpf/core/operators/result/global_total_energy.py index dadfcdff1e5..e93254333df 100644 --- a/src/ansys/dpf/core/operators/result/global_total_energy.py +++ b/src/ansys/dpf/core/operators/result/global_total_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_total_energy(Operator): r"""Read Global Total Energy (LSDyna) by calling the readers defined by the @@ -184,17 +191,21 @@ class InputsGlobalTotalEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(global_total_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_total_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(global_total_energy._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + global_total_energy._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._unit_system = Input(global_total_energy._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + global_total_energy._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -215,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -236,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -271,13 +282,13 @@ class OutputsGlobalTotalEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(global_total_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( global_total_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_total_mass.py b/src/ansys/dpf/core/operators/result/global_total_mass.py index e9fc6df292c..28c07654bcd 100644 --- a/src/ansys/dpf/core/operators/result/global_total_mass.py +++ b/src/ansys/dpf/core/operators/result/global_total_mass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_total_mass(Operator): r"""Read Global Total Mass (LSDyna) by calling the readers defined by the @@ -184,17 +191,21 @@ class InputsGlobalTotalMass(_Inputs): def __init__(self, op: Operator): super().__init__(global_total_mass._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( global_total_mass._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(global_total_mass._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + global_total_mass._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._unit_system = Input(global_total_mass._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + global_total_mass._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -215,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -236,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -271,11 +282,13 @@ class OutputsGlobalTotalMass(_Outputs): def __init__(self, op: Operator): super().__init__(global_total_mass._spec().outputs, op) - self._fields_container = Output(global_total_mass._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + global_total_mass._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/global_velocity.py b/src/ansys/dpf/core/operators/result/global_velocity.py index 8d2c49cc878..688747df509 100644 --- a/src/ansys/dpf/core/operators/result/global_velocity.py +++ b/src/ansys/dpf/core/operators/result/global_velocity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class global_velocity(Operator): r"""Read Global Velocity (LSDyna) by calling the readers defined by the @@ -184,15 +191,21 @@ class InputsGlobalVelocity(_Inputs): def __init__(self, op: Operator): super().__init__(global_velocity._spec().inputs, op) - self._streams_container = Input(global_velocity._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + global_velocity._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(global_velocity._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + global_velocity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._unit_system = Input(global_velocity._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + global_velocity._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -213,7 +226,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -234,7 +247,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -269,11 +282,13 @@ class OutputsGlobalVelocity(_Outputs): def __init__(self, op: Operator): super().__init__(global_velocity._spec().outputs, op) - self._fields_container = Output(global_velocity._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + global_velocity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/heat_flux.py b/src/ansys/dpf/core/operators/result/heat_flux.py index 5082ee1102b..6d45958bb4b 100644 --- a/src/ansys/dpf/core/operators/result/heat_flux.py +++ b/src/ansys/dpf/core/operators/result/heat_flux.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class heat_flux(Operator): r"""Read/compute heat flux by calling the readers defined by the @@ -619,43 +630,71 @@ class InputsHeatFlux(_Inputs): def __init__(self, op: Operator): super().__init__(heat_flux._spec().inputs, op) - self._time_scoping = Input(heat_flux._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + heat_flux._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(heat_flux._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + heat_flux._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(heat_flux._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + heat_flux._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(heat_flux._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + heat_flux._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(heat_flux._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + heat_flux._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(heat_flux._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + heat_flux._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(heat_flux._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + heat_flux._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(heat_flux._spec().input_pin(9), 9, op, -1) + 
self._requested_location: Input[str] = Input( + heat_flux._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(heat_flux._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + heat_flux._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( heat_flux._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input(heat_flux._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( + heat_flux._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(heat_flux._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input(heat_flux._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) - self._read_beams = Input(heat_flux._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + heat_flux._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(heat_flux._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + heat_flux._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(heat_flux._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + heat_flux._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input(heat_flux._spec().input_pin(28), 28, op, -1) + self._extend_to_mid_nodes: Input[bool] = Input( + heat_flux._spec().input_pin(28), 28, op, -1 + ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -676,7 +715,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -697,7 +736,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Fields container already allocated modified inplace @@ -718,7 +757,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -739,7 +778,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -760,7 +799,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -781,7 +820,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -802,7 +841,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -823,7 +862,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -844,7 +883,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -865,7 +904,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -886,7 +925,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -907,7 +946,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -928,7 +967,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -949,7 +988,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -970,7 +1009,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -1005,11 +1044,13 @@ class OutputsHeatFlux(_Outputs): def __init__(self, op: Operator): super().__init__(heat_flux._spec().outputs, op) - self._fields_container = Output(heat_flux._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + heat_flux._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/heat_flux_X.py b/src/ansys/dpf/core/operators/result/heat_flux_X.py index 01b753026e6..b2ece43ef6f 100644 --- a/src/ansys/dpf/core/operators/result/heat_flux_X.py +++ b/src/ansys/dpf/core/operators/result/heat_flux_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class heat_flux_X(Operator): r"""Read/compute heat flux X component of the vector (1st component) by @@ -303,29 +314,49 @@ class InputsHeatFluxX(_Inputs): def __init__(self, op: Operator): super().__init__(heat_flux_X._spec().inputs, op) - self._time_scoping = Input(heat_flux_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + heat_flux_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(heat_flux_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + heat_flux_X._spec().input_pin(1), 1, op, -1 + ) 
self._inputs.append(self._mesh_scoping) - self._fields_container = Input(heat_flux_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + heat_flux_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(heat_flux_X._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + heat_flux_X._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(heat_flux_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + heat_flux_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(heat_flux_X._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + heat_flux_X._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(heat_flux_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + heat_flux_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(heat_flux_X._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + heat_flux_X._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(heat_flux_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + heat_flux_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(heat_flux_X._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + heat_flux_X._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -346,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -367,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
FieldsContainer already allocated modified inplace @@ -388,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -409,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -430,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -451,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -472,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -493,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -514,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -549,11 +580,13 @@ class OutputsHeatFluxX(_Outputs): def __init__(self, op: Operator): super().__init__(heat_flux_X._spec().outputs, op) - self._fields_container = Output(heat_flux_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + heat_flux_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/heat_flux_Y.py b/src/ansys/dpf/core/operators/result/heat_flux_Y.py index 96f8a21632b..b22e67f1552 100644 --- a/src/ansys/dpf/core/operators/result/heat_flux_Y.py +++ b/src/ansys/dpf/core/operators/result/heat_flux_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class heat_flux_Y(Operator): r"""Read/compute heat flux Y component of the vector (2nd component) by @@ -303,29 +314,49 @@ class InputsHeatFluxY(_Inputs): def __init__(self, op: Operator): super().__init__(heat_flux_Y._spec().inputs, op) - self._time_scoping = Input(heat_flux_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + heat_flux_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(heat_flux_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + heat_flux_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(heat_flux_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + heat_flux_Y._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(heat_flux_Y._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + heat_flux_Y._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(heat_flux_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + heat_flux_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(heat_flux_Y._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + heat_flux_Y._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(heat_flux_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + heat_flux_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(heat_flux_Y._spec().input_pin(9), 9, op, -1) + 
self._requested_location: Input[str] = Input( + heat_flux_Y._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(heat_flux_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + heat_flux_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(heat_flux_Y._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + heat_flux_Y._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -346,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -367,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -388,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -409,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -430,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -451,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -472,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -493,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -514,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -549,11 +580,13 @@ class OutputsHeatFluxY(_Outputs): def __init__(self, op: Operator): super().__init__(heat_flux_Y._spec().outputs, op) - self._fields_container = Output(heat_flux_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + heat_flux_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/heat_flux_Z.py b/src/ansys/dpf/core/operators/result/heat_flux_Z.py index 1c64cdb53fd..fa025000003 100644 --- a/src/ansys/dpf/core/operators/result/heat_flux_Z.py +++ b/src/ansys/dpf/core/operators/result/heat_flux_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class heat_flux_Z(Operator): r"""Read/compute heat flux Z component of the vector (3rd component) by @@ -303,29 +314,49 @@ class InputsHeatFluxZ(_Inputs): def __init__(self, op: Operator): super().__init__(heat_flux_Z._spec().inputs, op) - self._time_scoping = Input(heat_flux_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + heat_flux_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(heat_flux_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + heat_flux_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(heat_flux_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + heat_flux_Z._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = 
Input(heat_flux_Z._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + heat_flux_Z._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(heat_flux_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + heat_flux_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(heat_flux_Z._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + heat_flux_Z._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(heat_flux_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + heat_flux_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(heat_flux_Z._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + heat_flux_Z._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(heat_flux_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + heat_flux_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(heat_flux_Z._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + heat_flux_Z._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -346,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -367,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -388,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -409,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -430,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -451,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -472,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -493,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -514,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -549,11 +580,13 @@ class OutputsHeatFluxZ(_Outputs): def __init__(self, op: Operator): super().__init__(heat_flux_Z._spec().outputs, op) - self._fields_container = Output(heat_flux_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + heat_flux_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/hydrostatic_pressure.py b/src/ansys/dpf/core/operators/result/hydrostatic_pressure.py index b350e1ba598..8b03f070f5e 100644 --- a/src/ansys/dpf/core/operators/result/hydrostatic_pressure.py +++ b/src/ansys/dpf/core/operators/result/hydrostatic_pressure.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class hydrostatic_pressure(Operator): r"""Read/compute element nodal hydrostatic pressure by calling the readers @@ -555,47 +566,57 @@ class InputsHydrostaticPressure(_Inputs): def __init__(self, op: Operator): super().__init__(hydrostatic_pressure._spec().inputs, op) - self._time_scoping = Input(hydrostatic_pressure._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + hydrostatic_pressure._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(hydrostatic_pressure._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + hydrostatic_pressure._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( hydrostatic_pressure._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( hydrostatic_pressure._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(hydrostatic_pressure._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + hydrostatic_pressure._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( hydrostatic_pressure._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(hydrostatic_pressure._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + hydrostatic_pressure._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: 
Input[str] = Input( hydrostatic_pressure._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_beams = Input(hydrostatic_pressure._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + hydrostatic_pressure._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( hydrostatic_pressure._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( hydrostatic_pressure._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( hydrostatic_pressure._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -616,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -637,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -658,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -679,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -700,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -721,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -742,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -763,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -784,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -805,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -826,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -861,13 +882,13 @@ class OutputsHydrostaticPressure(_Outputs): def __init__(self, op: Operator): super().__init__(hydrostatic_pressure._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( hydrostatic_pressure._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/incremental_energy.py b/src/ansys/dpf/core/operators/result/incremental_energy.py index 51f6b41dc24..ca9b1743190 100644 --- a/src/ansys/dpf/core/operators/result/incremental_energy.py +++ b/src/ansys/dpf/core/operators/result/incremental_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class incremental_energy(Operator): r"""Read/compute incremental energy (magnetics) by calling the readers @@ -251,29 +262,37 @@ class InputsIncrementalEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(incremental_energy._spec().inputs, op) - self._time_scoping = Input(incremental_energy._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + incremental_energy._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(incremental_energy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + incremental_energy._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( incremental_energy._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( incremental_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(incremental_energy._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + incremental_energy._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( incremental_energy._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(incremental_energy._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + incremental_energy._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -294,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -315,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -336,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -357,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -378,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -399,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -434,11 +453,13 @@ class OutputsIncrementalEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(incremental_energy._spec().outputs, op) - self._fields_container = Output(incremental_energy._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + incremental_energy._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/initial_coordinates.py b/src/ansys/dpf/core/operators/result/initial_coordinates.py index 5f1e3d1e266..e510381496f 100644 --- a/src/ansys/dpf/core/operators/result/initial_coordinates.py +++ b/src/ansys/dpf/core/operators/result/initial_coordinates.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class initial_coordinates(Operator): r"""Read/compute Initial Coordinates (LSDyna) by calling the readers defined @@ -251,29 +262,37 @@ class InputsInitialCoordinates(_Inputs): def __init__(self, op: Operator): super().__init__(initial_coordinates._spec().inputs, op) - self._time_scoping = Input(initial_coordinates._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + initial_coordinates._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(initial_coordinates._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + initial_coordinates._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( initial_coordinates._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( initial_coordinates._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(initial_coordinates._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + initial_coordinates._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( initial_coordinates._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(initial_coordinates._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + initial_coordinates._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | 
int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -294,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -315,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -336,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -357,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -378,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -399,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -434,13 +453,13 @@ class OutputsInitialCoordinates(_Outputs): def __init__(self, op: Operator): super().__init__(initial_coordinates._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( initial_coordinates._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/interface_contact_area.py b/src/ansys/dpf/core/operators/result/interface_contact_area.py index 5ad5dcd6c2e..ae79744108a 100644 --- a/src/ansys/dpf/core/operators/result/interface_contact_area.py +++ b/src/ansys/dpf/core/operators/result/interface_contact_area.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class interface_contact_area(Operator): r"""Read Interface Contact Area (LSDyna) by calling the readers defined by @@ -200,25 +208,25 @@ class InputsInterfaceContactArea(_Inputs): def __init__(self, op: Operator): super().__init__(interface_contact_area._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( interface_contact_area._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( interface_contact_area._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._entity_scoping = Input( + self._entity_scoping: Input[Scoping] = Input( interface_contact_area._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._entity_scoping) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( interface_contact_area._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -239,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -260,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. entity (part for matsum, interface for rcforc) where the result will be scoped @@ -281,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -316,13 +324,13 @@ class OutputsInterfaceContactArea(_Outputs): def __init__(self, op: Operator): super().__init__(interface_contact_area._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( interface_contact_area._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/interface_contact_force.py b/src/ansys/dpf/core/operators/result/interface_contact_force.py index e140a6a3da8..6cca86443f5 100644 --- a/src/ansys/dpf/core/operators/result/interface_contact_force.py +++ b/src/ansys/dpf/core/operators/result/interface_contact_force.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class interface_contact_force(Operator): r"""Read Interface Contact Force (LSDyna) by calling the readers defined by @@ -200,25 +208,25 @@ class InputsInterfaceContactForce(_Inputs): def __init__(self, op: Operator): super().__init__(interface_contact_force._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( interface_contact_force._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( interface_contact_force._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._entity_scoping = Input( + self._entity_scoping: Input[Scoping] = Input( interface_contact_force._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._entity_scoping) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( interface_contact_force._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -239,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -260,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. 
entity (part for matsum, interface for rcforc) where the result will be scoped @@ -281,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -316,13 +324,13 @@ class OutputsInterfaceContactForce(_Outputs): def __init__(self, op: Operator): super().__init__(interface_contact_force._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( interface_contact_force._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/interface_contact_mass.py b/src/ansys/dpf/core/operators/result/interface_contact_mass.py index e216420cde4..c4a3cd7a0e9 100644 --- a/src/ansys/dpf/core/operators/result/interface_contact_mass.py +++ b/src/ansys/dpf/core/operators/result/interface_contact_mass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class interface_contact_mass(Operator): r"""Read Interface Contact Mass (LSDyna) by calling the readers defined by @@ -200,25 +208,25 @@ class InputsInterfaceContactMass(_Inputs): def __init__(self, op: Operator): super().__init__(interface_contact_mass._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( interface_contact_mass._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( interface_contact_mass._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._entity_scoping = Input( + self._entity_scoping: Input[Scoping] = Input( interface_contact_mass._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._entity_scoping) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( interface_contact_mass._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -239,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
result file path container, used if no streams are set @@ -260,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. entity (part for matsum, interface for rcforc) where the result will be scoped @@ -281,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -316,13 +324,13 @@ class OutputsInterfaceContactMass(_Outputs): def __init__(self, op: Operator): super().__init__(interface_contact_mass._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( interface_contact_mass._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/interface_contact_moment.py b/src/ansys/dpf/core/operators/result/interface_contact_moment.py index 33ffe9af166..1d0d5fd48ff 100644 --- a/src/ansys/dpf/core/operators/result/interface_contact_moment.py +++ b/src/ansys/dpf/core/operators/result/interface_contact_moment.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class interface_contact_moment(Operator): r"""Read Interface Contact Moment (LSDyna) by calling the readers defined by @@ -200,25 +208,25 @@ class InputsInterfaceContactMoment(_Inputs): def __init__(self, op: Operator): super().__init__(interface_contact_moment._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( interface_contact_moment._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( interface_contact_moment._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._entity_scoping = Input( + self._entity_scoping: Input[Scoping] = Input( interface_contact_moment._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._entity_scoping) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( interface_contact_moment._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -239,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -260,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. entity (part for matsum, interface for rcforc) where the result will be scoped @@ -281,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -316,13 +324,13 @@ class OutputsInterfaceContactMoment(_Outputs): def __init__(self, op: Operator): super().__init__(interface_contact_moment._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( interface_contact_moment._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/interface_resultant_contact_force.py b/src/ansys/dpf/core/operators/result/interface_resultant_contact_force.py index f372d5371e6..b4fe7087cf0 100644 --- a/src/ansys/dpf/core/operators/result/interface_resultant_contact_force.py +++ b/src/ansys/dpf/core/operators/result/interface_resultant_contact_force.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class interface_resultant_contact_force(Operator): r"""Read Interface Resultant Contact Force (LSDyna) by calling the readers @@ -200,25 +208,25 @@ class InputsInterfaceResultantContactForce(_Inputs): def __init__(self, op: Operator): super().__init__(interface_resultant_contact_force._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( interface_resultant_contact_force._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( interface_resultant_contact_force._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._entity_scoping = Input( + self._entity_scoping: Input[Scoping] = Input( interface_resultant_contact_force._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._entity_scoping) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( interface_resultant_contact_force._spec().input_pin(50), 50, op, -1 ) 
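# Illustrative aside, not part of the patch: the Input[int | str | UnitSystem] annotation added
# just above mirrors the three forms this LSDyna pin documents. A minimal sketch, assuming the
# predefined systems in ansys.dpf.core.unit_systems are available on the installed version:
#   from ansys.dpf import core as dpf
#   from ansys.dpf.core import unit_systems
#   op = dpf.operators.result.interface_resultant_contact_force()
#   op.inputs.unit_system.connect(unit_systems.solver_mks)  # a UnitSystem instance
#   # an int unit-system ID or a semicolon-separated base-unit string is also accepted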
self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -239,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -260,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. entity (part for matsum, interface for rcforc) where the result will be scoped @@ -281,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -316,13 +324,13 @@ class OutputsInterfaceResultantContactForce(_Outputs): def __init__(self, op: Operator): super().__init__(interface_resultant_contact_force._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( interface_resultant_contact_force._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/joint_force_reaction.py b/src/ansys/dpf/core/operators/result/joint_force_reaction.py index 12aa021a7ed..e3fe1fe9024 100644 --- a/src/ansys/dpf/core/operators/result/joint_force_reaction.py +++ b/src/ansys/dpf/core/operators/result/joint_force_reaction.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class joint_force_reaction(Operator): r"""Read/compute joint force reaction by calling the readers defined by the @@ -251,29 +262,37 @@ class InputsJointForceReaction(_Inputs): def __init__(self, op: Operator): super().__init__(joint_force_reaction._spec().inputs, op) - self._time_scoping = Input(joint_force_reaction._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + joint_force_reaction._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(joint_force_reaction._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + joint_force_reaction._spec().input_pin(1), 1, 
op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( joint_force_reaction._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( joint_force_reaction._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(joint_force_reaction._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + joint_force_reaction._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( joint_force_reaction._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(joint_force_reaction._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + joint_force_reaction._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -294,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -315,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -336,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -357,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
result file path container, used if no streams are set @@ -378,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -399,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -434,13 +453,13 @@ class OutputsJointForceReaction(_Outputs): def __init__(self, op: Operator): super().__init__(joint_force_reaction._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( joint_force_reaction._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/joint_moment_reaction.py b/src/ansys/dpf/core/operators/result/joint_moment_reaction.py index 8aab871b623..24ae80defae 100644 --- a/src/ansys/dpf/core/operators/result/joint_moment_reaction.py +++ b/src/ansys/dpf/core/operators/result/joint_moment_reaction.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class joint_moment_reaction(Operator): r"""Read/compute joint moment reaction by calling the readers defined by the @@ -251,35 +262,37 @@ class InputsJointMomentReaction(_Inputs): def __init__(self, op: Operator): super().__init__(joint_moment_reaction._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( joint_moment_reaction._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( joint_moment_reaction._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( joint_moment_reaction._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( joint_moment_reaction._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + 
self._data_sources: Input[DataSources] = Input( joint_moment_reaction._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( joint_moment_reaction._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(joint_moment_reaction._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + joint_moment_reaction._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. 
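A minimal usage sketch (illustrative, not part of the patch) of how the typed pins above surface to users of a result operator such as joint_moment_reaction; the result-file path is a placeholder and a running DPF server is assumed:

from ansys.dpf import core as dpf

op = dpf.operators.result.joint_moment_reaction()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # typed as Input[DataSources]
op.inputs.bool_rotate_to_global.connect(True)                 # typed as Input[bool]
fields = op.outputs.fields_container()                        # evaluates and returns a FieldsContainer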
@@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsJointMomentReaction(_Outputs): def __init__(self, op: Operator): super().__init__(joint_moment_reaction._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( joint_moment_reaction._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/joint_relative_acceleration.py b/src/ansys/dpf/core/operators/result/joint_relative_acceleration.py index 73c61ed4733..dd25eaa73b1 100644 --- a/src/ansys/dpf/core/operators/result/joint_relative_acceleration.py +++ b/src/ansys/dpf/core/operators/result/joint_relative_acceleration.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class joint_relative_acceleration(Operator): r"""Read/compute joint relative acceleration by calling the readers defined @@ -251,35 +262,37 @@ class InputsJointRelativeAcceleration(_Inputs): def __init__(self, op: Operator): super().__init__(joint_relative_acceleration._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( joint_relative_acceleration._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( joint_relative_acceleration._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( joint_relative_acceleration._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( joint_relative_acceleration._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( joint_relative_acceleration._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( joint_relative_acceleration._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(joint_relative_acceleration._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + 
joint_relative_acceleration._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsJointRelativeAcceleration(_Outputs): def __init__(self, op: Operator): super().__init__(joint_relative_acceleration._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( joint_relative_acceleration._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/joint_relative_angular_acceleration.py b/src/ansys/dpf/core/operators/result/joint_relative_angular_acceleration.py index 6b92decbd80..dc70a6088b1 100644 --- a/src/ansys/dpf/core/operators/result/joint_relative_angular_acceleration.py +++ b/src/ansys/dpf/core/operators/result/joint_relative_angular_acceleration.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class joint_relative_angular_acceleration(Operator): r"""Read/compute joint relative angular acceleration by calling the readers @@ -251,37 +262,37 @@ class InputsJointRelativeAngularAcceleration(_Inputs): def __init__(self, op: Operator): super().__init__(joint_relative_angular_acceleration._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( joint_relative_angular_acceleration._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( joint_relative_angular_acceleration._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( joint_relative_angular_acceleration._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( joint_relative_angular_acceleration._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( joint_relative_angular_acceleration._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( joint_relative_angular_acceleration._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( joint_relative_angular_acceleration._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: 
r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -302,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -323,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -344,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -365,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -386,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -407,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -442,13 +453,13 @@ class OutputsJointRelativeAngularAcceleration(_Outputs): def __init__(self, op: Operator): super().__init__(joint_relative_angular_acceleration._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( joint_relative_angular_acceleration._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/joint_relative_angular_velocity.py b/src/ansys/dpf/core/operators/result/joint_relative_angular_velocity.py index e4e4dc1e6df..a7a57171ebc 100644 --- a/src/ansys/dpf/core/operators/result/joint_relative_angular_velocity.py +++ b/src/ansys/dpf/core/operators/result/joint_relative_angular_velocity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class joint_relative_angular_velocity(Operator): r"""Read/compute joint relative angular velocity by calling the readers @@ -251,37 +262,37 @@ class InputsJointRelativeAngularVelocity(_Inputs): def __init__(self, op: Operator): super().__init__(joint_relative_angular_velocity._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( joint_relative_angular_velocity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( joint_relative_angular_velocity._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( joint_relative_angular_velocity._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( joint_relative_angular_velocity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( joint_relative_angular_velocity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( joint_relative_angular_velocity._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( joint_relative_angular_velocity._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to 
the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -302,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -323,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -344,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -365,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -386,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -407,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -442,13 +453,13 @@ class OutputsJointRelativeAngularVelocity(_Outputs): def __init__(self, op: Operator): super().__init__(joint_relative_angular_velocity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( joint_relative_angular_velocity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/joint_relative_displacement.py b/src/ansys/dpf/core/operators/result/joint_relative_displacement.py index 10eb3382322..b2f11103c39 100644 --- a/src/ansys/dpf/core/operators/result/joint_relative_displacement.py +++ b/src/ansys/dpf/core/operators/result/joint_relative_displacement.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class joint_relative_displacement(Operator): r"""Read/compute joint relative displacement by calling the readers defined @@ -251,35 +262,37 @@ class InputsJointRelativeDisplacement(_Inputs): def __init__(self, op: Operator): super().__init__(joint_relative_displacement._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( joint_relative_displacement._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( joint_relative_displacement._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( joint_relative_displacement._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( joint_relative_displacement._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( joint_relative_displacement._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( joint_relative_displacement._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(joint_relative_displacement._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + joint_relative_displacement._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input 
to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsJointRelativeDisplacement(_Outputs): def __init__(self, op: Operator): super().__init__(joint_relative_displacement._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( joint_relative_displacement._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/joint_relative_rotation.py b/src/ansys/dpf/core/operators/result/joint_relative_rotation.py index c03c51bc1ab..e4e8eb8d7a0 100644 --- a/src/ansys/dpf/core/operators/result/joint_relative_rotation.py +++ b/src/ansys/dpf/core/operators/result/joint_relative_rotation.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class joint_relative_rotation(Operator): r"""Read/compute joint relative rotation by calling the readers defined by @@ -251,35 +262,37 @@ class InputsJointRelativeRotation(_Inputs): def __init__(self, op: Operator): super().__init__(joint_relative_rotation._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( joint_relative_rotation._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( joint_relative_rotation._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( joint_relative_rotation._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( joint_relative_rotation._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( joint_relative_rotation._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( joint_relative_rotation._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(joint_relative_rotation._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + joint_relative_rotation._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
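# Editor's sketch (not part of the diff): minimal usage of the typed pins
# declared above for joint_relative_rotation; the result-file path is a
# placeholder. With time_scoping annotated as Input[Scoping | int | float | Field],
# an IDE or type checker can now show which Python types a pin accepts.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.joint_relative_rotation()
op.inputs.data_sources.connect(dpf.DataSources(r"model.rst"))  # placeholder path
op.inputs.time_scoping.connect(-1)          # int: request all time/freq sets
fields = op.outputs.fields_container()      # pin typed as Output[FieldsContainer]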
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsJointRelativeRotation(_Outputs): def __init__(self, op: Operator): super().__init__(joint_relative_rotation._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( joint_relative_rotation._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/joint_relative_velocity.py b/src/ansys/dpf/core/operators/result/joint_relative_velocity.py index aa8f21c9b2e..27ba53ccfcc 100644 --- a/src/ansys/dpf/core/operators/result/joint_relative_velocity.py +++ b/src/ansys/dpf/core/operators/result/joint_relative_velocity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class joint_relative_velocity(Operator): r"""Read/compute joint relative velocity by calling the readers defined by @@ -251,35 +262,37 @@ class InputsJointRelativeVelocity(_Inputs): def __init__(self, op: Operator): super().__init__(joint_relative_velocity._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( joint_relative_velocity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( joint_relative_velocity._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( joint_relative_velocity._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( joint_relative_velocity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( joint_relative_velocity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( joint_relative_velocity._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(joint_relative_velocity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + joint_relative_velocity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
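# Editor's sketch (not part of the diff): what the Generic[T] parameterization
# is meant to buy. A static checker such as mypy can warn about mismatched
# connections, while connect() itself still accepts anything at runtime.
from ansys.dpf.core import operators as ops

op = ops.result.joint_relative_velocity()
op.inputs.bool_rotate_to_global.connect(True)   # OK: pin is Input[bool]
op.inputs.time_scoping.connect(-1)              # OK: int is part of Scoping | int | float | Field
op.inputs.bool_rotate_to_global.connect("yes")  # a checker can flag this line; DPF would still try to connect it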
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsJointRelativeVelocity(_Outputs): def __init__(self, op: Operator): super().__init__(joint_relative_velocity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( joint_relative_velocity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/kinetic_energy.py b/src/ansys/dpf/core/operators/result/kinetic_energy.py index 4905aa55bce..66d9bd056a4 100644 --- a/src/ansys/dpf/core/operators/result/kinetic_energy.py +++ b/src/ansys/dpf/core/operators/result/kinetic_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class kinetic_energy(Operator): r"""Read/compute kinetic energy by calling the readers defined by the @@ -315,37 +326,53 @@ class InputsKineticEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(kinetic_energy._spec().inputs, op) - self._time_scoping = Input(kinetic_energy._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + kinetic_energy._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(kinetic_energy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + kinetic_energy._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(kinetic_energy._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + kinetic_energy._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(kinetic_energy._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + kinetic_energy._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(kinetic_energy._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + kinetic_energy._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( kinetic_energy._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(kinetic_energy._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + kinetic_energy._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(kinetic_energy._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + 
kinetic_energy._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( kinetic_energy._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( kinetic_energy._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(kinetic_energy._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + kinetic_energy._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -366,7 +393,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -387,7 +414,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -408,7 +435,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -429,7 +456,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -450,7 +477,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. 
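# Editor's sketch (not part of the diff): driving the cyclic-expansion pins
# typed just above (Input[int], Input[Scoping | ScopingsContainer],
# Input[float]); the file path and sector ids are placeholders.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.kinetic_energy()
op.inputs.data_sources.connect(dpf.DataSources(r"cyclic_model.rst"))  # placeholder path
op.inputs.read_cyclic.connect(2)                          # 2: perform cyclic expansion
op.inputs.sectors_to_expand.connect(dpf.Scoping(ids=[0, 1, 2]))
op.inputs.phi.connect(30.0)                               # angle in degrees
fields = op.outputs.fields_container()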
@@ -471,7 +498,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -492,7 +519,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -513,7 +540,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -534,7 +561,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -555,7 +582,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -590,11 +617,13 @@ class OutputsKineticEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(kinetic_energy._spec().outputs, op) - self._fields_container = Output(kinetic_energy._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + kinetic_energy._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/layer_orientation_provider.py b/src/ansys/dpf/core/operators/result/layer_orientation_provider.py index 6792af1c0d6..129c3c0b317 100644 --- a/src/ansys/dpf/core/operators/result/layer_orientation_provider.py +++ b/src/ansys/dpf/core/operators/result/layer_orientation_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class layer_orientation_provider(Operator): r"""Read the layer orientations. 
@@ -159,17 +165,17 @@ class InputsLayerOrientationProvider(_Inputs):
 
     def __init__(self, op: Operator):
         super().__init__(layer_orientation_provider._spec().inputs, op)
-        self._streams = Input(
+        self._streams: Input[StreamsContainer] = Input(
             layer_orientation_provider._spec().input_pin(3), 3, op, -1
         )
         self._inputs.append(self._streams)
-        self._data_sources = Input(
+        self._data_sources: Input[DataSources] = Input(
             layer_orientation_provider._spec().input_pin(4), 4, op, -1
         )
         self._inputs.append(self._data_sources)
 
     @property
-    def streams(self) -> Input:
+    def streams(self) -> Input[StreamsContainer]:
         r"""Allows to connect streams input to the operator.
 
         Result file container allowed to be kept open to cache data.
@@ -190,7 +196,7 @@ def streams(self) -> Input:
         return self._streams
 
     @property
-    def data_sources(self) -> Input:
+    def data_sources(self) -> Input[DataSources]:
         r"""Allows to connect data_sources input to the operator.
 
         Result file path container, used if no streams are set.
@@ -225,13 +231,13 @@ class OutputsLayerOrientationProvider(_Outputs):
 
     def __init__(self, op: Operator):
         super().__init__(layer_orientation_provider._spec().outputs, op)
-        self._layer_orientation_data = Output(
+        self._layer_orientation_data: Output[FieldsContainer] = Output(
             layer_orientation_provider._spec().output_pin(0), 0, op
         )
         self._outputs.append(self._layer_orientation_data)
 
     @property
-    def layer_orientation_data(self) -> Output:
+    def layer_orientation_data(self) -> Output[FieldsContainer]:
         r"""Allows to get layer_orientation_data output of the operator
 
         Requested data as FieldsContainer.
diff --git a/src/ansys/dpf/core/operators/result/mach_number.py b/src/ansys/dpf/core/operators/result/mach_number.py
index deecc36cfb9..ad520018f01 100644
--- a/src/ansys/dpf/core/operators/result/mach_number.py
+++ b/src/ansys/dpf/core/operators/result/mach_number.py
@@ -5,6 +5,7 @@
 """
 
 from __future__ import annotations
+from typing import TYPE_CHECKING
 from warnings import warn
 
 from ansys.dpf.core.dpf_operator import Operator
@@ -14,6 +15,16 @@
 from ansys.dpf.core.config import Config
 from ansys.dpf.core.server_types import AnyServerType
 
+if TYPE_CHECKING:
+    from ansys.dpf.core.data_sources import DataSources
+    from ansys.dpf.core.field import Field
+    from ansys.dpf.core.fields_container import FieldsContainer
+    from ansys.dpf.core.meshed_region import MeshedRegion
+    from ansys.dpf.core.meshes_container import MeshesContainer
+    from ansys.dpf.core.scoping import Scoping
+    from ansys.dpf.core.scopings_container import ScopingsContainer
+    from ansys.dpf.core.streams_container import StreamsContainer
+
 
 class mach_number(Operator):
     r"""Read Mach Number by calling the readers defined by the datasources.
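# Editor's sketch (not from this diff): stand-alone illustration of the
# annotation pattern the generated modules now follow. Names needed only for
# annotations are imported under TYPE_CHECKING, and "from __future__ import
# annotations" keeps them unevaluated at runtime. Pin is a hypothetical
# stand-in for dpf's Input/Output, not the real class.
from __future__ import annotations

from typing import TYPE_CHECKING, Generic, TypeVar

if TYPE_CHECKING:  # evaluated only by type checkers; no runtime import cost or cycles
    from ansys.dpf.core.fields_container import FieldsContainer

T = TypeVar("T")


class Pin(Generic[T]):  # hypothetical stand-in
    def connect(self, value: T) -> None:
        """Pretend to connect *value* to this pin."""


fields_pin: Pin[FieldsContainer] = Pin()  # annotation stored as a string, resolved lazily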
@@ -265,25 +276,41 @@ class InputsMachNumber(_Inputs): def __init__(self, op: Operator): super().__init__(mach_number._spec().inputs, op) - self._time_scoping = Input(mach_number._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + mach_number._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(mach_number._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + mach_number._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(mach_number._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + mach_number._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(mach_number._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + mach_number._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(mach_number._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + mach_number._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(mach_number._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + mach_number._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(mach_number._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + mach_number._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(mach_number._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + mach_number._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
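# Editor's sketch (not part of the diff) for the Fluids-oriented pins typed
# above: region_scoping is Input[Scoping | int] and qualifiers1/qualifiers2 are
# Input[dict]. The file name and the zone/phase ids are invented for illustration.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.mach_number()
op.inputs.data_sources.connect(dpf.DataSources(r"fluid.cas.h5"))  # placeholder path
op.inputs.region_scoping.connect(1)                      # a single zone id
op.inputs.qualifiers1.connect({"zone": 1, "phase": 2})   # LabelSpace-like dict
fields = op.outputs.fields_container()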
result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsMachNumber(_Outputs): def __init__(self, op: Operator): super().__init__(mach_number._spec().outputs, op) - self._fields_container = Output(mach_number._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + mach_number._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/magnetic_field.py b/src/ansys/dpf/core/operators/result/magnetic_field.py index 616ca77f083..995c1d38854 100644 --- a/src/ansys/dpf/core/operators/result/magnetic_field.py +++ b/src/ansys/dpf/core/operators/result/magnetic_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class magnetic_field(Operator): r"""Read/compute Magnetic Field by calling the readers defined by the @@ -555,37 +566,57 @@ class InputsMagneticField(_Inputs): def __init__(self, op: Operator): super().__init__(magnetic_field._spec().inputs, op) - self._time_scoping = Input(magnetic_field._spec().input_pin(0), 0, op, -1) + self._time_scoping: 
Input[Scoping | int | float | Field] = Input( + magnetic_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(magnetic_field._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + magnetic_field._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(magnetic_field._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + magnetic_field._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(magnetic_field._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + magnetic_field._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(magnetic_field._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + magnetic_field._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( magnetic_field._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(magnetic_field._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + magnetic_field._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(magnetic_field._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + magnetic_field._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_beams = Input(magnetic_field._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + magnetic_field._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(magnetic_field._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + magnetic_field._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(magnetic_field._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + magnetic_field._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( magnetic_field._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -606,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. 
nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -627,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -648,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -669,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -690,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -711,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -732,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -753,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -774,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -795,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). 
The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -816,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -851,11 +882,13 @@ class OutputsMagneticField(_Outputs): def __init__(self, op: Operator): super().__init__(magnetic_field._spec().outputs, op) - self._fields_container = Output(magnetic_field._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + magnetic_field._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/magnetic_field_X.py b/src/ansys/dpf/core/operators/result/magnetic_field_X.py index 0db9982b85c..3afa4692996 100644 --- a/src/ansys/dpf/core/operators/result/magnetic_field_X.py +++ b/src/ansys/dpf/core/operators/result/magnetic_field_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class magnetic_field_X(Operator): r"""Read/compute Magnetic Field X component of the vector (1st component) by @@ -303,35 +314,49 @@ class InputsMagneticFieldX(_Inputs): def __init__(self, op: Operator): super().__init__(magnetic_field_X._spec().inputs, op) - self._time_scoping = Input(magnetic_field_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + magnetic_field_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(magnetic_field_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + magnetic_field_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(magnetic_field_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + magnetic_field_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( magnetic_field_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(magnetic_field_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + magnetic_field_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( magnetic_field_X._spec().input_pin(5), 5, op, -1 ) 
self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(magnetic_field_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + magnetic_field_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( magnetic_field_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(magnetic_field_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + magnetic_field_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(magnetic_field_X._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + magnetic_field_X._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. 
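# Editor's sketch (not part of the diff): the typed outputs also document what
# flows between chained operators. norm_fc is used here only as a familiar
# downstream operator and is an assumption, as is the file path.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

mag_x = ops.result.magnetic_field_X()
mag_x.inputs.data_sources.connect(dpf.DataSources(r"model.rst"))  # placeholder path
mag_x.inputs.requested_location.connect("Nodal")         # Input[str]

nrm = ops.math.norm_fc()
nrm.inputs.fields_container.connect(mag_x.outputs.fields_container)  # Output[FieldsContainer] -> Input[FieldsContainer]
fields = nrm.outputs.fields_container()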
if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsMagneticFieldX(_Outputs): def __init__(self, op: Operator): super().__init__(magnetic_field_X._spec().outputs, op) - self._fields_container = Output(magnetic_field_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + magnetic_field_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/magnetic_field_Y.py b/src/ansys/dpf/core/operators/result/magnetic_field_Y.py index 8c1a46026c8..58a284d14b5 100644 --- a/src/ansys/dpf/core/operators/result/magnetic_field_Y.py +++ b/src/ansys/dpf/core/operators/result/magnetic_field_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class magnetic_field_Y(Operator): r"""Read/compute Magnetic Field Y component of the vector (2nd component) by @@ -303,35 +314,49 @@ class InputsMagneticFieldY(_Inputs): def __init__(self, op: Operator): super().__init__(magnetic_field_Y._spec().inputs, op) - self._time_scoping = Input(magnetic_field_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + magnetic_field_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(magnetic_field_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = 
Input( + magnetic_field_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(magnetic_field_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + magnetic_field_Y._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( magnetic_field_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(magnetic_field_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + magnetic_field_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( magnetic_field_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(magnetic_field_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + magnetic_field_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( magnetic_field_Y._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(magnetic_field_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + magnetic_field_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(magnetic_field_Y._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + magnetic_field_Y._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsMagneticFieldY(_Outputs): def __init__(self, op: Operator): super().__init__(magnetic_field_Y._spec().outputs, op) - self._fields_container = Output(magnetic_field_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + magnetic_field_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/magnetic_field_Z.py b/src/ansys/dpf/core/operators/result/magnetic_field_Z.py index 0ee2d057182..9f906b5e314 100644 --- a/src/ansys/dpf/core/operators/result/magnetic_field_Z.py +++ b/src/ansys/dpf/core/operators/result/magnetic_field_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class magnetic_field_Z(Operator): r"""Read/compute Magnetic Field Z component of the vector (3rd component) by @@ -303,35 +314,49 @@ class InputsMagneticFieldZ(_Inputs): def __init__(self, op: Operator): super().__init__(magnetic_field_Z._spec().inputs, op) - self._time_scoping = Input(magnetic_field_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + magnetic_field_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(magnetic_field_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + magnetic_field_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(magnetic_field_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + magnetic_field_Z._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( magnetic_field_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(magnetic_field_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + magnetic_field_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( magnetic_field_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(magnetic_field_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + magnetic_field_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: 
Input[str] = Input( magnetic_field_Z._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(magnetic_field_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + magnetic_field_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(magnetic_field_Z._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + magnetic_field_Z._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsMagneticFieldZ(_Outputs): def __init__(self, op: Operator): super().__init__(magnetic_field_Z._spec().outputs, op) - self._fields_container = Output(magnetic_field_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + magnetic_field_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/magnetic_flux_density.py b/src/ansys/dpf/core/operators/result/magnetic_flux_density.py index 6b1111c3b4a..7657faf2185 100644 --- a/src/ansys/dpf/core/operators/result/magnetic_flux_density.py +++ b/src/ansys/dpf/core/operators/result/magnetic_flux_density.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class magnetic_flux_density(Operator): r"""Read/compute Magnetic Flux Density by calling the readers defined by the @@ -555,55 +566,57 @@ class InputsMagneticFluxDensity(_Inputs): def __init__(self, op: Operator): super().__init__(magnetic_flux_density._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( magnetic_flux_density._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( magnetic_flux_density._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( magnetic_flux_density._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: 
Input[StreamsContainer] = Input( magnetic_flux_density._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( magnetic_flux_density._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( magnetic_flux_density._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(magnetic_flux_density._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + magnetic_flux_density._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( magnetic_flux_density._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( magnetic_flux_density._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( magnetic_flux_density._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( magnetic_flux_density._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( magnetic_flux_density._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -624,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -645,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Fields container already allocated modified inplace @@ -666,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -687,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -708,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -729,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -750,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -771,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -792,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -813,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -834,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -869,13 +882,13 @@ class OutputsMagneticFluxDensity(_Outputs): def __init__(self, op: Operator): super().__init__(magnetic_flux_density._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( magnetic_flux_density._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/magnetic_flux_density_X.py b/src/ansys/dpf/core/operators/result/magnetic_flux_density_X.py index 91e7c624d18..f9a28fe183a 100644 --- a/src/ansys/dpf/core/operators/result/magnetic_flux_density_X.py +++ b/src/ansys/dpf/core/operators/result/magnetic_flux_density_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class magnetic_flux_density_X(Operator): r"""Read/compute Magnetic Flux Density X component of the vector (1st @@ -303,47 +314,49 @@ class InputsMagneticFluxDensityX(_Inputs): def __init__(self, op: Operator): super().__init__(magnetic_flux_density_X._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( magnetic_flux_density_X._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( magnetic_flux_density_X._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( magnetic_flux_density_X._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( magnetic_flux_density_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( magnetic_flux_density_X._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( magnetic_flux_density_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(magnetic_flux_density_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + magnetic_flux_density_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( magnetic_flux_density_X._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( 
magnetic_flux_density_X._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( magnetic_flux_density_X._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -364,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -385,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -406,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -427,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -448,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -469,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -490,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
requested location, default is Nodal @@ -511,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -532,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -567,13 +580,13 @@ class OutputsMagneticFluxDensityX(_Outputs): def __init__(self, op: Operator): super().__init__(magnetic_flux_density_X._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( magnetic_flux_density_X._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/magnetic_flux_density_Y.py b/src/ansys/dpf/core/operators/result/magnetic_flux_density_Y.py index f3bbe265ce1..c6bf09d0ed2 100644 --- a/src/ansys/dpf/core/operators/result/magnetic_flux_density_Y.py +++ b/src/ansys/dpf/core/operators/result/magnetic_flux_density_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class magnetic_flux_density_Y(Operator): r"""Read/compute Magnetic Flux Density Y component of the vector (2nd @@ -303,47 +314,49 @@ class InputsMagneticFluxDensityY(_Inputs): def __init__(self, op: Operator): super().__init__(magnetic_flux_density_Y._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( magnetic_flux_density_Y._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( magnetic_flux_density_Y._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( magnetic_flux_density_Y._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( magnetic_flux_density_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( magnetic_flux_density_Y._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) 
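The imports added at the top of magnetic_flux_density_Y (and of every other file in this diff) sit behind a TYPE_CHECKING guard while the module keeps from __future__ import annotations, so names such as Field, FieldsContainer, and Scoping are resolved only during static analysis and are never imported at runtime. A minimal sketch of that pattern, with an illustrative class standing in for the generated operator code:

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Resolved only by the type checker; no runtime import and no import cycle.
        from ansys.dpf.core.fields_container import FieldsContainer

    class ExampleOutputs:
        def __init__(self):
            # Postponed evaluation means this annotation is never evaluated at runtime,
            # so FieldsContainer does not need to be importable here.
            self._fields_container: FieldsContainer | None = None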
- self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( magnetic_flux_density_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(magnetic_flux_density_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + magnetic_flux_density_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( magnetic_flux_density_Y._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( magnetic_flux_density_Y._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( magnetic_flux_density_Y._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -364,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -385,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -406,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -427,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -448,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. 
if true the field is rotated to global coordinate system (default true) @@ -469,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -490,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -511,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -532,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -567,13 +580,13 @@ class OutputsMagneticFluxDensityY(_Outputs): def __init__(self, op: Operator): super().__init__(magnetic_flux_density_Y._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( magnetic_flux_density_Y._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/magnetic_flux_density_Z.py b/src/ansys/dpf/core/operators/result/magnetic_flux_density_Z.py index 23b298ba0fb..a14b13bc889 100644 --- a/src/ansys/dpf/core/operators/result/magnetic_flux_density_Z.py +++ b/src/ansys/dpf/core/operators/result/magnetic_flux_density_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class magnetic_flux_density_Z(Operator): r"""Read/compute Magnetic Flux Density Z component of the vector (3rd @@ -303,47 +314,49 @@ class InputsMagneticFluxDensityZ(_Inputs): def __init__(self, op: Operator): super().__init__(magnetic_flux_density_Z._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( magnetic_flux_density_Z._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( magnetic_flux_density_Z._spec().input_pin(1), 1, op, -1 ) 
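Pins that accept several DPF types are expressed as unions in these hunks, for example Input[Scoping | int | float | Field] for time_scoping and Input[ScopingsContainer | Scoping] for mesh_scoping, with the result exposed as Output[FieldsContainer]. A minimal sketch, not part of the diff, of connecting either accepted form on magnetic_flux_density_Z, assuming a running DPF server; the file name, set id, and node ids are placeholders:

    from ansys.dpf import core as dpf

    op = dpf.operators.result.magnetic_flux_density_Z()
    op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # placeholder path
    op.inputs.time_scoping.connect(1)  # an int (time/freq set id) satisfies the union
    op.inputs.mesh_scoping.connect(dpf.Scoping(ids=[1, 2, 3], location="Nodal"))  # a Scoping also fits its union-typed pin
    fields = op.outputs.fields_container()  # property typed as Output[FieldsContainer]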
self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( magnetic_flux_density_Z._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( magnetic_flux_density_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( magnetic_flux_density_Z._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( magnetic_flux_density_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(magnetic_flux_density_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + magnetic_flux_density_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( magnetic_flux_density_Z._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( magnetic_flux_density_Z._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( magnetic_flux_density_Z._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -364,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -385,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -406,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -427,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -448,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -469,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -490,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -511,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -532,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -567,13 +580,13 @@ class OutputsMagneticFluxDensityZ(_Outputs): def __init__(self, op: Operator): super().__init__(magnetic_flux_density_Z._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( magnetic_flux_density_Z._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/magnetic_scalar_potential.py b/src/ansys/dpf/core/operators/result/magnetic_scalar_potential.py index c89787fc883..47d27c5635c 100644 --- a/src/ansys/dpf/core/operators/result/magnetic_scalar_potential.py +++ b/src/ansys/dpf/core/operators/result/magnetic_scalar_potential.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class magnetic_scalar_potential(Operator): r"""Read/compute Magnetic Scalar Potential by calling the readers defined by @@ -251,35 +262,37 @@ class InputsMagneticScalarPotential(_Inputs): def __init__(self, op: Operator): super().__init__(magnetic_scalar_potential._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( magnetic_scalar_potential._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( magnetic_scalar_potential._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( magnetic_scalar_potential._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( magnetic_scalar_potential._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( magnetic_scalar_potential._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( magnetic_scalar_potential._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(magnetic_scalar_potential._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + magnetic_scalar_potential._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsMagneticScalarPotential(_Outputs): def __init__(self, op: Operator): super().__init__(magnetic_scalar_potential._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( magnetic_scalar_potential._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/magnetic_vector_potential.py b/src/ansys/dpf/core/operators/result/magnetic_vector_potential.py index abb43a8d13d..c60bd4986a1 100644 --- a/src/ansys/dpf/core/operators/result/magnetic_vector_potential.py +++ b/src/ansys/dpf/core/operators/result/magnetic_vector_potential.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class magnetic_vector_potential(Operator): r"""Read/compute Magnetic Vector Potential by calling the readers defined by @@ -251,35 +262,37 @@ class InputsMagneticVectorPotential(_Inputs): def __init__(self, op: Operator): super().__init__(magnetic_vector_potential._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( magnetic_vector_potential._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( magnetic_vector_potential._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( magnetic_vector_potential._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( magnetic_vector_potential._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( magnetic_vector_potential._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( magnetic_vector_potential._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(magnetic_vector_potential._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + magnetic_vector_potential._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsMagneticVectorPotential(_Outputs): def __init__(self, op: Operator): super().__init__(magnetic_vector_potential._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( magnetic_vector_potential._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/mapdl_material_properties.py b/src/ansys/dpf/core/operators/result/mapdl_material_properties.py index 14dd78c64e2..5ccadc2cf18 100644 --- a/src/ansys/dpf/core/operators/result/mapdl_material_properties.py +++ b/src/ansys/dpf/core/operators/result/mapdl_material_properties.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.property_field import PropertyField + from ansys.dpf.core.streams_container import StreamsContainer + class mapdl_material_properties(Operator): r"""Read the values of the properties of a material for a given materials @@ -213,25 +220,25 @@ class InputsMapdlMaterialProperties(_Inputs): def __init__(self, op: Operator): super().__init__(mapdl_material_properties._spec().inputs, op) - self._properties_name = Input( + self._properties_name: Input[str] = Input( mapdl_material_properties._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._properties_name) - self._materials = Input( + self._materials: Input[PropertyField] = Input( mapdl_material_properties._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._materials) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( mapdl_material_properties._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( mapdl_material_properties._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def properties_name(self) -> Input: + def properties_name(self) -> Input[str]: r"""Allows to connect properties_name input to the operator. Returns @@ -250,7 +257,7 @@ def properties_name(self) -> Input: return self._properties_name @property - def materials(self) -> Input: + def materials(self) -> Input[PropertyField]: r"""Allows to connect materials input to the operator. Property field that contains a material id per element. @@ -271,7 +278,7 @@ def materials(self) -> Input: return self._materials @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -290,7 +297,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
Returns @@ -323,13 +330,13 @@ class OutputsMapdlMaterialProperties(_Outputs): def __init__(self, op: Operator): super().__init__(mapdl_material_properties._spec().outputs, op) - self._properties_value = Output( + self._properties_value: Output[FieldsContainer] = Output( mapdl_material_properties._spec().output_pin(0), 0, op ) self._outputs.append(self._properties_value) @property - def properties_value(self) -> Output: + def properties_value(self) -> Output[FieldsContainer]: r"""Allows to get properties_value output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/mapdl_section.py b/src/ansys/dpf/core/operators/result/mapdl_section.py index 10ec3af0bde..194c5e6c387 100644 --- a/src/ansys/dpf/core/operators/result/mapdl_section.py +++ b/src/ansys/dpf/core/operators/result/mapdl_section.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.property_field import PropertyField + from ansys.dpf.core.streams_container import StreamsContainer + class mapdl_section(Operator): r"""Read the values of the section properties for a given section property @@ -240,21 +247,33 @@ class InputsMapdlSection(_Inputs): def __init__(self, op: Operator): super().__init__(mapdl_section._spec().inputs, op) - self._properties_name = Input(mapdl_section._spec().input_pin(0), 0, op, -1) + self._properties_name: Input[str] = Input( + mapdl_section._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._properties_name) - self._section = Input(mapdl_section._spec().input_pin(1), 1, op, -1) + self._section: Input[PropertyField] = Input( + mapdl_section._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._section) - self._streams_container = Input(mapdl_section._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + mapdl_section._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(mapdl_section._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + mapdl_section._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._layer_property = Input(mapdl_section._spec().input_pin(5), 5, op, -1) + self._layer_property: Input[bool] = Input( + mapdl_section._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._layer_property) - self._layers_requested = Input(mapdl_section._spec().input_pin(6), 6, op, -1) + self._layers_requested: Input = Input( + mapdl_section._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._layers_requested) @property - def properties_name(self) -> Input: + def properties_name(self) -> Input[str]: r"""Allows to connect properties_name input to the operator. Returns @@ -273,7 +292,7 @@ def properties_name(self) -> Input: return self._properties_name @property - def section(self) -> Input: + def section(self) -> Input[PropertyField]: r"""Allows to connect section input to the operator. 
Property field that contains a section id per element.(optional) @@ -294,7 +313,7 @@ def section(self) -> Input: return self._section @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -313,7 +332,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -332,7 +351,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def layer_property(self) -> Input: + def layer_property(self) -> Input[bool]: r"""Allows to connect layer_property input to the operator. Property requested is layered. Default = False. @@ -389,13 +408,17 @@ class OutputsMapdlSection(_Outputs): def __init__(self, op: Operator): super().__init__(mapdl_section._spec().outputs, op) - self._properties_value = Output(mapdl_section._spec().output_pin(0), 0, op) + self._properties_value: Output[FieldsContainer] = Output( + mapdl_section._spec().output_pin(0), 0, op + ) self._outputs.append(self._properties_value) - self._layers_per_section = Output(mapdl_section._spec().output_pin(1), 1, op) + self._layers_per_section: Output[PropertyField] = Output( + mapdl_section._spec().output_pin(1), 1, op + ) self._outputs.append(self._layers_per_section) @property - def properties_value(self) -> Output: + def properties_value(self) -> Output[FieldsContainer]: r"""Allows to get properties_value output of the operator Returns @@ -413,7 +436,7 @@ def properties_value(self) -> Output: return self._properties_value @property - def layers_per_section(self) -> Output: + def layers_per_section(self) -> Output[PropertyField]: r"""Allows to get layers_per_section output of the operator Only available if layer_property option is set to True. 
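(For context only, not part of the patch: a minimal usage sketch of what the Input[T]/Output[T] annotations added above surface for mapdl_section. The result file path and the "thickness" property name are placeholder assumptions, and evaluating the outputs requires a reachable DPF server and a valid MAPDL result file.)

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

ds = dpf.DataSources(r"D:\results\file.rst")      # hypothetical result file path
op = ops.result.mapdl_section()
op.inputs.properties_name.connect("thickness")    # Input[str]; example property name
op.inputs.data_sources.connect(ds)                # Input[DataSources]
op.inputs.layer_property.connect(True)            # Input[bool]; request layered properties
sections = op.outputs.properties_value()          # evaluates pin 0 -> FieldsContainer
layers = op.outputs.layers_per_section()          # pin 1 -> PropertyField, only with layer_property=True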
diff --git a/src/ansys/dpf/core/operators/result/mapdl_split_on_facet_indices.py b/src/ansys/dpf/core/operators/result/mapdl_split_on_facet_indices.py index 3aac534d516..fa860699a67 100644 --- a/src/ansys/dpf/core/operators/result/mapdl_split_on_facet_indices.py +++ b/src/ansys/dpf/core/operators/result/mapdl_split_on_facet_indices.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.property_field import PropertyField + from ansys.dpf.core.scoping import Scoping + class mapdl_split_on_facet_indices(Operator): r"""Splits each Field in a FieldsContainer defined on the skin elements of a @@ -245,33 +252,33 @@ class InputsMapdlSplitOnFacetIndices(_Inputs): def __init__(self, op: Operator): super().__init__(mapdl_split_on_facet_indices._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( mapdl_split_on_facet_indices._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._property_field_new_elements_to_old = Input( + self._property_field_new_elements_to_old: Input[PropertyField] = Input( mapdl_split_on_facet_indices._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._property_field_new_elements_to_old) - self._facet_indices = Input( + self._facet_indices: Input[PropertyField] = Input( mapdl_split_on_facet_indices._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._facet_indices) - self._volume_mesh = Input( + self._volume_mesh: Input[MeshedRegion] = Input( mapdl_split_on_facet_indices._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._volume_mesh) - self._degenerated_tets = Input( + self._degenerated_tets: Input[Scoping] = Input( mapdl_split_on_facet_indices._spec().input_pin(185), 185, op, -1 ) self._inputs.append(self._degenerated_tets) - self._non_degenerated_tets = Input( + self._non_degenerated_tets: Input[Scoping] = Input( mapdl_split_on_facet_indices._spec().input_pin(285), 285, op, -1 ) self._inputs.append(self._non_degenerated_tets) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container to split, with generic number of labels (e.g. time, zone, complex...), and the Fields of the FieldsContainer will have location Elemental and the Scoping Ids will be the Element Ids on the skin mesh. @@ -292,7 +299,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def property_field_new_elements_to_old(self) -> Input: + def property_field_new_elements_to_old(self) -> Input[PropertyField]: r"""Allows to connect property_field_new_elements_to_old input to the operator. This property field provides, for each new face element ID (in the scoping), the corresponding 3D volume element index (in the data) it has been extracted from. The 3D volume element ID can be found with the element scoping of the input mesh. 
@@ -313,7 +320,7 @@ def property_field_new_elements_to_old(self) -> Input: return self._property_field_new_elements_to_old @property - def facet_indices(self) -> Input: + def facet_indices(self) -> Input[PropertyField]: r"""Allows to connect facet_indices input to the operator. This property field gives, for each new face element ID (in the scoping), the corresponding face index on the source 3D volume element. The 3D volume element can be extracted from the previous output. @@ -334,7 +341,7 @@ def facet_indices(self) -> Input: return self._facet_indices @property - def volume_mesh(self) -> Input: + def volume_mesh(self) -> Input[MeshedRegion]: r"""Allows to connect volume_mesh input to the operator. The solid support. @@ -355,7 +362,7 @@ def volume_mesh(self) -> Input: return self._volume_mesh @property - def degenerated_tets(self) -> Input: + def degenerated_tets(self) -> Input[Scoping]: r"""Allows to connect degenerated_tets input to the operator. Elemental scoping of tet elements. If connected, the tets in the scoping are treated as degenerated tets (SOLID185), and the rest as non-degenerated tets (SOLID285). Pins 185 and 285 are mutually exclusionary (they cannot be connected at the same time), and if none of them is connected, all tets are treated as non-degenerated (SOLID285). @@ -376,7 +383,7 @@ def degenerated_tets(self) -> Input: return self._degenerated_tets @property - def non_degenerated_tets(self) -> Input: + def non_degenerated_tets(self) -> Input[Scoping]: r"""Allows to connect non_degenerated_tets input to the operator. Elemental scoping of tet elements. If connected, the tets in the scoping are treated as non-degenerated tets (SOLID285), and the rest as degenerated tets (SOLID185). Pins 185 and 285 are mutually exclusionary (they cannot be connected at the same time), and if none of them is connected, all tets are treated as non-degenerated (SOLID285). 
@@ -411,13 +418,13 @@ class OutputsMapdlSplitOnFacetIndices(_Outputs): def __init__(self, op: Operator): super().__init__(mapdl_split_on_facet_indices._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( mapdl_split_on_facet_indices._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Output splitted fields containter diff --git a/src/ansys/dpf/core/operators/result/mapdl_split_to_acmo_facet_indices.py b/src/ansys/dpf/core/operators/result/mapdl_split_to_acmo_facet_indices.py index 1f39c35da55..d966b144f18 100644 --- a/src/ansys/dpf/core/operators/result/mapdl_split_to_acmo_facet_indices.py +++ b/src/ansys/dpf/core/operators/result/mapdl_split_to_acmo_facet_indices.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class mapdl_split_to_acmo_facet_indices(Operator): r"""This will retain the already existing labels from the input FC and will @@ -175,17 +179,17 @@ class InputsMapdlSplitToAcmoFacetIndices(_Inputs): def __init__(self, op: Operator): super().__init__(mapdl_split_to_acmo_facet_indices._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( mapdl_split_to_acmo_facet_indices._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._property_fields_container_element_types = Input( + self._property_fields_container_element_types: Input = Input( mapdl_split_to_acmo_facet_indices._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._property_fields_container_element_types) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container to split, with generic number of labels (e.g. time, zone, complex...), 'facet' label is compulsory.The Fields of the FieldsContainer will have location Elemental and the Scoping Ids will be the Element Ids on the skin mesh. 
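(Again for context only, not part of the patch: a hedged sketch of what the annotations on mapdl_split_on_facet_indices above expose statically. It only instantiates the operator, which assumes a DPF server can be started; no result data is connected.)

from ansys.dpf.core import operators as ops

split_op = ops.result.mapdl_split_on_facet_indices()
# A type checker or IDE can now report, for example:
#   split_op.inputs.fields_container     -> Input[FieldsContainer]
#   split_op.inputs.volume_mesh          -> Input[MeshedRegion]
#   split_op.inputs.degenerated_tets     -> Input[Scoping]   (pin 185)
#   split_op.inputs.non_degenerated_tets -> Input[Scoping]   (pin 285, exclusive with 185)
# so connecting an object of the wrong DPF type can be flagged before runtime.
print(split_op.inputs.volume_mesh)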
@@ -241,13 +245,13 @@ class OutputsMapdlSplitToAcmoFacetIndices(_Outputs): def __init__(self, op: Operator): super().__init__(mapdl_split_to_acmo_facet_indices._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( mapdl_split_to_acmo_facet_indices._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Output splitted fields containter diff --git a/src/ansys/dpf/core/operators/result/mass_flow_rate.py b/src/ansys/dpf/core/operators/result/mass_flow_rate.py index a7d28472179..b23c151f52c 100644 --- a/src/ansys/dpf/core/operators/result/mass_flow_rate.py +++ b/src/ansys/dpf/core/operators/result/mass_flow_rate.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class mass_flow_rate(Operator): r"""Read Mass Flow Rate by calling the readers defined by the datasources. @@ -265,25 +276,41 @@ class InputsMassFlowRate(_Inputs): def __init__(self, op: Operator): super().__init__(mass_flow_rate._spec().inputs, op) - self._time_scoping = Input(mass_flow_rate._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + mass_flow_rate._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(mass_flow_rate._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + mass_flow_rate._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(mass_flow_rate._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + mass_flow_rate._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(mass_flow_rate._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + mass_flow_rate._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(mass_flow_rate._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + mass_flow_rate._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(mass_flow_rate._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + mass_flow_rate._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(mass_flow_rate._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + mass_flow_rate._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(mass_flow_rate._spec().input_pin(1001), 1001, op, 1) + 
self._qualifiers2: Input[dict] = Input( + mass_flow_rate._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsMassFlowRate(_Outputs): def __init__(self, op: Operator): super().__init__(mass_flow_rate._spec().outputs, op) - self._fields_container = Output(mass_flow_rate._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + mass_flow_rate._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/mass_fraction.py b/src/ansys/dpf/core/operators/result/mass_fraction.py index 4aabee28429..c515c13e77f 100644 --- a/src/ansys/dpf/core/operators/result/mass_fraction.py +++ b/src/ansys/dpf/core/operators/result/mass_fraction.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class mass_fraction(Operator): r"""Read Mass Fraction by calling the readers defined by the datasources. @@ -265,25 +276,41 @@ class InputsMassFraction(_Inputs): def __init__(self, op: Operator): super().__init__(mass_fraction._spec().inputs, op) - self._time_scoping = Input(mass_fraction._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + mass_fraction._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(mass_fraction._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + mass_fraction._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(mass_fraction._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + mass_fraction._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(mass_fraction._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + mass_fraction._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(mass_fraction._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + mass_fraction._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(mass_fraction._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + mass_fraction._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(mass_fraction._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + mass_fraction._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = 
Input(mass_fraction._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + mass_fraction._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsMassFraction(_Outputs): def __init__(self, op: Operator): super().__init__(mass_fraction._spec().outputs, op) - self._fields_container = Output(mass_fraction._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + mass_fraction._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/material_property_of_element.py b/src/ansys/dpf/core/operators/result/material_property_of_element.py index f6ad48e9f51..f74ad8c4b1e 100644 --- a/src/ansys/dpf/core/operators/result/material_property_of_element.py +++ b/src/ansys/dpf/core/operators/result/material_property_of_element.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.streams_container import StreamsContainer + class material_property_of_element(Operator): r"""Loads the appropriate operator based on the data sources and retrieves @@ -159,17 +165,17 @@ class InputsMaterialPropertyOfElement(_Inputs): def __init__(self, op: Operator): super().__init__(material_property_of_element._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( material_property_of_element._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( material_property_of_element._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -188,7 +194,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
Returns @@ -221,13 +227,13 @@ class OutputsMaterialPropertyOfElement(_Outputs): def __init__(self, op: Operator): super().__init__(material_property_of_element._spec().outputs, op) - self._material_properties = Output( + self._material_properties: Output[Field] = Output( material_property_of_element._spec().output_pin(0), 0, op ) self._outputs.append(self._material_properties) @property - def material_properties(self) -> Output: + def material_properties(self) -> Output[Field]: r"""Allows to get material_properties output of the operator material properties diff --git a/src/ansys/dpf/core/operators/result/mean_static_pressure.py b/src/ansys/dpf/core/operators/result/mean_static_pressure.py index 1ab2622b903..28573e922d4 100644 --- a/src/ansys/dpf/core/operators/result/mean_static_pressure.py +++ b/src/ansys/dpf/core/operators/result/mean_static_pressure.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class mean_static_pressure(Operator): r"""Read Mean Static Pressure by calling the readers defined by the @@ -267,33 +278,41 @@ class InputsMeanStaticPressure(_Inputs): def __init__(self, op: Operator): super().__init__(mean_static_pressure._spec().inputs, op) - self._time_scoping = Input(mean_static_pressure._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + mean_static_pressure._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(mean_static_pressure._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + mean_static_pressure._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( mean_static_pressure._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(mean_static_pressure._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + mean_static_pressure._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(mean_static_pressure._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + mean_static_pressure._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( mean_static_pressure._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input( + self._qualifiers1: Input[dict] = Input( mean_static_pressure._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input( + self._qualifiers2: Input[dict] = Input( mean_static_pressure._spec().input_pin(1001), 1001, op, 1 ) 
self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -314,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -335,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -356,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -377,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -398,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -419,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -440,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -475,13 +494,13 @@ class OutputsMeanStaticPressure(_Outputs): def __init__(self, op: Operator): super().__init__(mean_static_pressure._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( mean_static_pressure._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/mean_temperature.py b/src/ansys/dpf/core/operators/result/mean_temperature.py index ce53db0cc8e..1c78c4a7c77 100644 --- a/src/ansys/dpf/core/operators/result/mean_temperature.py +++ b/src/ansys/dpf/core/operators/result/mean_temperature.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class mean_temperature(Operator): r"""Read Mean Temperature by calling the readers defined by the datasources. @@ -265,27 +276,41 @@ class InputsMeanTemperature(_Inputs): def __init__(self, op: Operator): super().__init__(mean_temperature._spec().inputs, op) - self._time_scoping = Input(mean_temperature._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + mean_temperature._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(mean_temperature._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + mean_temperature._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( mean_temperature._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(mean_temperature._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + mean_temperature._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(mean_temperature._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + mean_temperature._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(mean_temperature._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + mean_temperature._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(mean_temperature._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + mean_temperature._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(mean_temperature._spec().input_pin(1001), 
1001, op, 1) + self._qualifiers2: Input[dict] = Input( + mean_temperature._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -306,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -327,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -348,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -369,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -390,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -411,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -432,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -467,11 +492,13 @@ class OutputsMeanTemperature(_Outputs): def __init__(self, op: Operator): super().__init__(mean_temperature._spec().outputs, op) - self._fields_container = Output(mean_temperature._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + mean_temperature._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/mean_velocity.py b/src/ansys/dpf/core/operators/result/mean_velocity.py index a703f1d40e6..266a825bc30 100644 --- a/src/ansys/dpf/core/operators/result/mean_velocity.py +++ b/src/ansys/dpf/core/operators/result/mean_velocity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class mean_velocity(Operator): r"""Read Mean Velocity by calling the readers defined by the datasources. @@ -265,25 +276,41 @@ class InputsMeanVelocity(_Inputs): def __init__(self, op: Operator): super().__init__(mean_velocity._spec().inputs, op) - self._time_scoping = Input(mean_velocity._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + mean_velocity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(mean_velocity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + mean_velocity._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(mean_velocity._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + mean_velocity._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(mean_velocity._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + mean_velocity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(mean_velocity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + mean_velocity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(mean_velocity._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + mean_velocity._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(mean_velocity._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + mean_velocity._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = 
Input(mean_velocity._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + mean_velocity._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsMeanVelocity(_Outputs): def __init__(self, op: Operator): super().__init__(mean_velocity._spec().outputs, op) - self._fields_container = Output(mean_velocity._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + mean_velocity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/members_in_bending_not_certified.py b/src/ansys/dpf/core/operators/result/members_in_bending_not_certified.py index 8ee6f736c3d..108006cf558 100644 --- a/src/ansys/dpf/core/operators/result/members_in_bending_not_certified.py +++ b/src/ansys/dpf/core/operators/result/members_in_bending_not_certified.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class members_in_bending_not_certified(Operator): r"""This operator is a non-certified example of buckling resistance @@ -299,45 +308,45 @@ class InputsMembersInBendingNotCertified(_Inputs): def __init__(self, op: Operator): super().__init__(members_in_bending_not_certified._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int] = Input( members_in_bending_not_certified._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._field_yield_strength = Input( + self._field_yield_strength: Input[Field] = Input( members_in_bending_not_certified._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._field_yield_strength) - self._class_cross_section = Input( + self._class_cross_section: Input[bool] = Input( members_in_bending_not_certified._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._class_cross_section) - self._streams = Input( + self._streams: Input[StreamsContainer] = Input( members_in_bending_not_certified._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( members_in_bending_not_certified._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._partial_factor = Input( + self._partial_factor: Input[float] = Input( members_in_bending_not_certified._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._partial_factor) - self._mesh = Input( + self._mesh: Input[MeshedRegion] = Input( members_in_bending_not_certified._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) - self._bending_moment_y = Input( + self._bending_moment_y: Input[FieldsContainer] = Input( members_in_bending_not_certified._spec().input_pin(8), 8, op, -1 ) self._inputs.append(self._bending_moment_y) - self._bending_moment_z = Input( + self._bending_moment_z: Input[FieldsContainer] = Input( 
members_in_bending_not_certified._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._bending_moment_z) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int]: r"""Allows to connect time_scoping input to the operator. Returns @@ -356,7 +365,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def field_yield_strength(self) -> Input: + def field_yield_strength(self) -> Input[Field]: r"""Allows to connect field_yield_strength input to the operator. This pin contains field of beam's Yield Strength defined by the user. @@ -377,7 +386,7 @@ def field_yield_strength(self) -> Input: return self._field_yield_strength @property - def class_cross_section(self) -> Input: + def class_cross_section(self) -> Input[bool]: r"""Allows to connect class_cross_section input to the operator. Selection for a cross-section. True: Class 1 or 2 cross-sections. False: Class 3 cross section. If the user defines the cross section as class 1 or 2, the section modulus would be plastic section modulus. If it's class 3- cross section,the section modulus would be elastic section modulus @@ -398,7 +407,7 @@ def class_cross_section(self) -> Input: return self._class_cross_section @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. result file container allowed to be kept open to cache data. @@ -419,7 +428,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set. @@ -440,7 +449,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def partial_factor(self) -> Input: + def partial_factor(self) -> Input[float]: r"""Allows to connect partial_factor input to the operator. partial safety factor for resistance of members to instability assessed by member checks. Default value: 1. @@ -461,7 +470,7 @@ def partial_factor(self) -> Input: return self._partial_factor @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh containing beam's properties defined by user @@ -482,7 +491,7 @@ def mesh(self) -> Input: return self._mesh @property - def bending_moment_y(self) -> Input: + def bending_moment_y(self) -> Input[FieldsContainer]: r"""Allows to connect bending_moment_y input to the operator. Fields Container of bending moment on axis y defined by user @@ -503,7 +512,7 @@ def bending_moment_y(self) -> Input: return self._bending_moment_y @property - def bending_moment_z(self) -> Input: + def bending_moment_z(self) -> Input[FieldsContainer]: r"""Allows to connect bending_moment_z input to the operator. 
Fields Container of bending moment on axis z defined by user @@ -539,17 +548,17 @@ class OutputsMembersInBendingNotCertified(_Outputs): def __init__(self, op: Operator): super().__init__(members_in_bending_not_certified._spec().outputs, op) - self._buckling_resistance_bending_yy = Output( + self._buckling_resistance_bending_yy: Output[FieldsContainer] = Output( members_in_bending_not_certified._spec().output_pin(0), 0, op ) self._outputs.append(self._buckling_resistance_bending_yy) - self._buckling_resistance_bending_zz = Output( + self._buckling_resistance_bending_zz: Output[FieldsContainer] = Output( members_in_bending_not_certified._spec().output_pin(1), 1, op ) self._outputs.append(self._buckling_resistance_bending_zz) @property - def buckling_resistance_bending_yy(self) -> Output: + def buckling_resistance_bending_yy(self) -> Output[FieldsContainer]: r"""Allows to get buckling_resistance_bending_yy output of the operator Fields Container of buckling resistance factor on axis y-y in case of bending(M). These factors should be less than 1 and positive. @@ -569,7 +578,7 @@ def buckling_resistance_bending_yy(self) -> Output: return self._buckling_resistance_bending_yy @property - def buckling_resistance_bending_zz(self) -> Output: + def buckling_resistance_bending_zz(self) -> Output[FieldsContainer]: r"""Allows to get buckling_resistance_bending_zz output of the operator Fields Container of buckling resistance factor on axis z-z in case of bending(M). These factors should be less than 1 and positive. diff --git a/src/ansys/dpf/core/operators/result/members_in_compression_not_certified.py b/src/ansys/dpf/core/operators/result/members_in_compression_not_certified.py index 4960dab7241..16b0d57d737 100644 --- a/src/ansys/dpf/core/operators/result/members_in_compression_not_certified.py +++ b/src/ansys/dpf/core/operators/result/members_in_compression_not_certified.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class members_in_compression_not_certified(Operator): r"""This operator is a non-certified example of buckling resistance @@ -314,49 +323,49 @@ class InputsMembersInCompressionNotCertified(_Inputs): def __init__(self, op: Operator): super().__init__(members_in_compression_not_certified._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int] = Input( members_in_compression_not_certified._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._field_yield_strength = Input( + self._field_yield_strength: Input[DataSources | Field] = Input( members_in_compression_not_certified._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._field_yield_strength) - self._field_end_condition = Input( + self._field_end_condition: Input[DataSources | Field] = Input( members_in_compression_not_certified._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._field_end_condition) - self._streams = Input( + self._streams: Input[StreamsContainer] = Input( 
members_in_compression_not_certified._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( members_in_compression_not_certified._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._manufacture = Input( + self._manufacture: Input[bool] = Input( members_in_compression_not_certified._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._manufacture) - self._partial_factor = Input( + self._partial_factor: Input[float] = Input( members_in_compression_not_certified._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._partial_factor) - self._mesh = Input( + self._mesh: Input[MeshedRegion] = Input( members_in_compression_not_certified._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) - self._axial_force = Input( + self._axial_force: Input[FieldsContainer] = Input( members_in_compression_not_certified._spec().input_pin(8), 8, op, -1 ) self._inputs.append(self._axial_force) - self._fabrication_type = Input( + self._fabrication_type: Input[bool] = Input( members_in_compression_not_certified._spec().input_pin(12), 12, op, -1 ) self._inputs.append(self._fabrication_type) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int]: r"""Allows to connect time_scoping input to the operator. time/freq set ids (use ints or scoping) @@ -377,7 +386,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def field_yield_strength(self) -> Input: + def field_yield_strength(self) -> Input[DataSources | Field]: r"""Allows to connect field_yield_strength input to the operator. This pin contains file csv or field of beam's Yield Strength. @@ -398,7 +407,7 @@ def field_yield_strength(self) -> Input: return self._field_yield_strength @property - def field_end_condition(self) -> Input: + def field_end_condition(self) -> Input[DataSources | Field]: r"""Allows to connect field_end_condition input to the operator. This pin contains file csv or field of beam's end condition defined by the user. If no input at this pin found, it would take end condition's value of all beams as 1. @@ -419,7 +428,7 @@ def field_end_condition(self) -> Input: return self._field_end_condition @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. result file container allowed to be kept open to cache data. @@ -440,7 +449,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set. @@ -461,7 +470,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def manufacture(self) -> Input: + def manufacture(self) -> Input[bool]: r"""Allows to connect manufacture input to the operator. Manufacturing processus:hot finished if TRUE or cold formed if FALSE. Default value : hot finished. @@ -482,7 +491,7 @@ def manufacture(self) -> Input: return self._manufacture @property - def partial_factor(self) -> Input: + def partial_factor(self) -> Input[float]: r"""Allows to connect partial_factor input to the operator. partial safety factor for resistance of members to instability assessed by member checks. Default value: 1. 
@@ -503,7 +512,7 @@ def partial_factor(self) -> Input: return self._partial_factor @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh containing beam's properties defined by user @@ -524,7 +533,7 @@ def mesh(self) -> Input: return self._mesh @property - def axial_force(self) -> Input: + def axial_force(self) -> Input[FieldsContainer]: r"""Allows to connect axial_force input to the operator. Fields Container of axial force defined by user @@ -545,7 +554,7 @@ def axial_force(self) -> Input: return self._axial_force @property - def fabrication_type(self) -> Input: + def fabrication_type(self) -> Input[bool]: r"""Allows to connect fabrication_type input to the operator. If there is beam I in the structure, please define its fabrication type. True: Rolled section, False: Welded section @@ -581,17 +590,17 @@ class OutputsMembersInCompressionNotCertified(_Outputs): def __init__(self, op: Operator): super().__init__(members_in_compression_not_certified._spec().outputs, op) - self._buckling_resistance_compression_yy = Output( + self._buckling_resistance_compression_yy: Output[FieldsContainer] = Output( members_in_compression_not_certified._spec().output_pin(0), 0, op ) self._outputs.append(self._buckling_resistance_compression_yy) - self._buckling_resistance_compression_zz = Output( + self._buckling_resistance_compression_zz: Output[FieldsContainer] = Output( members_in_compression_not_certified._spec().output_pin(1), 1, op ) self._outputs.append(self._buckling_resistance_compression_zz) @property - def buckling_resistance_compression_yy(self) -> Output: + def buckling_resistance_compression_yy(self) -> Output[FieldsContainer]: r"""Allows to get buckling_resistance_compression_yy output of the operator Fields Container of buckling resistance factor on axis y-y in case of compression. These factors should be less than 1 and positive. @@ -611,7 +620,7 @@ def buckling_resistance_compression_yy(self) -> Output: return self._buckling_resistance_compression_yy @property - def buckling_resistance_compression_zz(self) -> Output: + def buckling_resistance_compression_zz(self) -> Output[FieldsContainer]: r"""Allows to get buckling_resistance_compression_zz output of the operator Fields Container of buckling resistance factor on axis z-z in case of compression. These factors should be less than 1 and positive. 
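The annotated pins above give members_in_compression_not_certified concrete parameter types that editors and static checkers can act on. A minimal, hedged sketch of the intended effect; it only uses pins shown in this diff and assumes ansys-dpf-core is installed with a reachable local DPF server, with illustrative values:

    # Sketch: exercising the typed pins of members_in_compression_not_certified.
    # A local DPF server is assumed; the values are illustrative only.
    from ansys.dpf.core import operators as ops

    op = ops.result.members_in_compression_not_certified()
    op.inputs.partial_factor.connect(1.1)    # accepted: partial_factor is Input[float]
    op.inputs.manufacture.connect(True)      # accepted: manufacture is Input[bool]
    op.inputs.partial_factor.connect("1.1")  # reported by mypy/pyright: str is not float

Runtime behaviour of connect() is unchanged; the annotations only add information for type checkers.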
diff --git a/src/ansys/dpf/core/operators/result/members_in_linear_compression_bending_not_certified.py b/src/ansys/dpf/core/operators/result/members_in_linear_compression_bending_not_certified.py index 2a9d7a12ed2..7f39f73be51 100644 --- a/src/ansys/dpf/core/operators/result/members_in_linear_compression_bending_not_certified.py +++ b/src/ansys/dpf/core/operators/result/members_in_linear_compression_bending_not_certified.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class members_in_linear_compression_bending_not_certified(Operator): r"""This operator is a non-certified example of buckling resistance @@ -358,91 +367,91 @@ def __init__(self, op: Operator): super().__init__( members_in_linear_compression_bending_not_certified._spec().inputs, op ) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(0), 0, op, -1, ) self._inputs.append(self._time_scoping) - self._field_yield_strength = Input( + self._field_yield_strength: Input[Field] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(1), 1, op, -1, ) self._inputs.append(self._field_yield_strength) - self._field_end_condition = Input( + self._field_end_condition: Input[DataSources | Field] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(2), 2, op, -1, ) self._inputs.append(self._field_end_condition) - self._streams = Input( + self._streams: Input[StreamsContainer] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(3), 3, op, -1, ) self._inputs.append(self._streams) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(4), 4, op, -1, ) self._inputs.append(self._data_sources) - self._manufacture = Input( + self._manufacture: Input[bool] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(5), 5, op, -1, ) self._inputs.append(self._manufacture) - self._partial_factor = Input( + self._partial_factor: Input[float] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(6), 6, op, -1, ) self._inputs.append(self._partial_factor) - self._mesh = Input( + self._mesh: Input[MeshedRegion] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(7), 7, op, -1, ) self._inputs.append(self._mesh) - self._bending_moment_y = Input( + self._bending_moment_y: Input[FieldsContainer] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(8), 8, op, -1, ) self._inputs.append(self._bending_moment_y) - self._bending_moment_z = Input( + self._bending_moment_z: Input[FieldsContainer] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(9), 9, op, -1, ) self._inputs.append(self._bending_moment_z) - self._axial_force = Input( + self._axial_force: Input[FieldsContainer] = Input( 
members_in_linear_compression_bending_not_certified._spec().input_pin(10), 10, op, -1, ) self._inputs.append(self._axial_force) - self._class_cross_section = Input( + self._class_cross_section: Input[bool] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(11), 11, op, -1, ) self._inputs.append(self._class_cross_section) - self._fabrication_type = Input( + self._fabrication_type: Input[bool] = Input( members_in_linear_compression_bending_not_certified._spec().input_pin(12), 12, op, @@ -451,7 +460,7 @@ def __init__(self, op: Operator): self._inputs.append(self._fabrication_type) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int]: r"""Allows to connect time_scoping input to the operator. Returns @@ -470,7 +479,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def field_yield_strength(self) -> Input: + def field_yield_strength(self) -> Input[Field]: r"""Allows to connect field_yield_strength input to the operator. This pin contains field of beam's Yield Strength defined by the user. @@ -491,7 +500,7 @@ def field_yield_strength(self) -> Input: return self._field_yield_strength @property - def field_end_condition(self) -> Input: + def field_end_condition(self) -> Input[DataSources | Field]: r"""Allows to connect field_end_condition input to the operator. This pin contains file csv or field of beam's end condition defined by the user. If no input at this pin found, it would take end conditions value of all beams as 1 @@ -512,7 +521,7 @@ def field_end_condition(self) -> Input: return self._field_end_condition @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. result file container allowed to be kept open to cache data. @@ -533,7 +542,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set. @@ -554,7 +563,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def manufacture(self) -> Input: + def manufacture(self) -> Input[bool]: r"""Allows to connect manufacture input to the operator. Manufacturing processus:hot finished if TRUE or cold formed if FALSE. Default value : hot finished. @@ -575,7 +584,7 @@ def manufacture(self) -> Input: return self._manufacture @property - def partial_factor(self) -> Input: + def partial_factor(self) -> Input[float]: r"""Allows to connect partial_factor input to the operator. partial factor for resistance of members to instability assessed by member checks. Default value: 1.0 @@ -596,7 +605,7 @@ def partial_factor(self) -> Input: return self._partial_factor @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh containing beam's properties defined by user @@ -617,7 +626,7 @@ def mesh(self) -> Input: return self._mesh @property - def bending_moment_y(self) -> Input: + def bending_moment_y(self) -> Input[FieldsContainer]: r"""Allows to connect bending_moment_y input to the operator. 
Fields Container of bending moment on axis y defined by user @@ -638,7 +647,7 @@ def bending_moment_y(self) -> Input: return self._bending_moment_y @property - def bending_moment_z(self) -> Input: + def bending_moment_z(self) -> Input[FieldsContainer]: r"""Allows to connect bending_moment_z input to the operator. Fields Container of bending moment on axis z defined by user @@ -659,7 +668,7 @@ def bending_moment_z(self) -> Input: return self._bending_moment_z @property - def axial_force(self) -> Input: + def axial_force(self) -> Input[FieldsContainer]: r"""Allows to connect axial_force input to the operator. Fields Container of axial force defined by user @@ -680,7 +689,7 @@ def axial_force(self) -> Input: return self._axial_force @property - def class_cross_section(self) -> Input: + def class_cross_section(self) -> Input[bool]: r"""Allows to connect class_cross_section input to the operator. Selection for a cross-section. True: Class 1 or 2 cross-sections. False: Class 3 cross section. If the user defines the cross section as class 1 or 2, the section modulus would be plastic section modulus. If it's class 3- cross section,the section modulus would be elastic section modulus @@ -701,7 +710,7 @@ def class_cross_section(self) -> Input: return self._class_cross_section @property - def fabrication_type(self) -> Input: + def fabrication_type(self) -> Input[bool]: r"""Allows to connect fabrication_type input to the operator. Selection of fabrication's type if there are beams I in the structure. TRUE: Rolled Section, False: Welded Section. Default: Rolled Section. @@ -738,7 +747,9 @@ def __init__(self, op: Operator): super().__init__( members_in_linear_compression_bending_not_certified._spec().outputs, op ) - self._buckling_resistance_linear_summation_utilization_ratios = Output( + self._buckling_resistance_linear_summation_utilization_ratios: Output[ + FieldsContainer + ] = Output( members_in_linear_compression_bending_not_certified._spec().output_pin(0), 0, op, @@ -748,7 +759,9 @@ def __init__(self, op: Operator): ) @property - def buckling_resistance_linear_summation_utilization_ratios(self) -> Output: + def buckling_resistance_linear_summation_utilization_ratios( + self, + ) -> Output[FieldsContainer]: r"""Allows to get buckling_resistance_linear_summation_utilization_ratios output of the operator Linear summation of the utilization ratios in all members submitted under a combination of both bending and compression. These factors should be less than 1 and positive. 
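Across the generated modules, the annotation-only imports are wrapped in if TYPE_CHECKING blocks so the runtime import graph of the operators package does not grow. A standalone sketch of that pattern; Pin and the pathlib usage below are illustrative stand-ins, not ansys-dpf-core API:

    # Illustrative sketch of the TYPE_CHECKING + Generic pattern used in this change.
    from __future__ import annotations

    from typing import TYPE_CHECKING, Generic, TypeVar

    if TYPE_CHECKING:
        from pathlib import Path  # imported only while type checking, never at runtime

    T = TypeVar("T")

    class Pin(Generic[T]):
        def connect(self, value: T) -> None:
            # runtime accepts anything; the type parameter exists for checkers only
            print(f"connected {value!r}")

    path_pin: Pin[Path] = Pin()     # annotation stays unevaluated (PEP 563 future import)
    path_pin.connect("result.rst")  # runs fine; mypy/pyright report that str is not Path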
diff --git a/src/ansys/dpf/core/operators/result/migrate_to_h5dpf.py b/src/ansys/dpf/core/operators/result/migrate_to_h5dpf.py index 27fab3820c1..6679e7ec678 100644 --- a/src/ansys/dpf/core/operators/result/migrate_to_h5dpf.py +++ b/src/ansys/dpf/core/operators/result/migrate_to_h5dpf.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.data_tree import DataTree + from ansys.dpf.core.generic_data_container import GenericDataContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.workflow import Workflow + class migrate_to_h5dpf(Operator): r"""Read mesh properties from the results files contained in the streams or @@ -322,43 +330,53 @@ class InputsMigrateToH5Dpf(_Inputs): def __init__(self, op: Operator): super().__init__(migrate_to_h5dpf._spec().inputs, op) - self._h5_chunk_size = Input(migrate_to_h5dpf._spec().input_pin(-7), -7, op, -1) + self._h5_chunk_size: Input[int | GenericDataContainer] = Input( + migrate_to_h5dpf._spec().input_pin(-7), -7, op, -1 + ) self._inputs.append(self._h5_chunk_size) - self._dataset_size_compression_threshold = Input( - migrate_to_h5dpf._spec().input_pin(-5), -5, op, -1 + self._dataset_size_compression_threshold: Input[int | GenericDataContainer] = ( + Input(migrate_to_h5dpf._spec().input_pin(-5), -5, op, -1) ) self._inputs.append(self._dataset_size_compression_threshold) - self._h5_native_compression = Input( - migrate_to_h5dpf._spec().input_pin(-2), -2, op, -1 + self._h5_native_compression: Input[int | DataTree | GenericDataContainer] = ( + Input(migrate_to_h5dpf._spec().input_pin(-2), -2, op, -1) ) self._inputs.append(self._h5_native_compression) - self._export_floats = Input(migrate_to_h5dpf._spec().input_pin(-1), -1, op, -1) + self._export_floats: Input[bool | GenericDataContainer] = Input( + migrate_to_h5dpf._spec().input_pin(-1), -1, op, -1 + ) self._inputs.append(self._export_floats) - self._filename = Input(migrate_to_h5dpf._spec().input_pin(0), 0, op, -1) + self._filename: Input[str] = Input( + migrate_to_h5dpf._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._filename) - self._comma_separated_list_of_results = Input( + self._comma_separated_list_of_results: Input[str] = Input( migrate_to_h5dpf._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._comma_separated_list_of_results) - self._all_time_sets = Input(migrate_to_h5dpf._spec().input_pin(2), 2, op, -1) + self._all_time_sets: Input[bool] = Input( + migrate_to_h5dpf._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._all_time_sets) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( migrate_to_h5dpf._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(migrate_to_h5dpf._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + migrate_to_h5dpf._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._compression_workflow = Input( + self._compression_workflow: Input[Workflow | GenericDataContainer] = Input( migrate_to_h5dpf._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._compression_workflow) - self._filtering_workflow = Input( + 
self._filtering_workflow: Input[Workflow | GenericDataContainer] = Input( migrate_to_h5dpf._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._filtering_workflow) @property - def h5_chunk_size(self) -> Input: + def h5_chunk_size(self) -> Input[int | GenericDataContainer]: r"""Allows to connect h5_chunk_size input to the operator. Size of each HDF5 chunk in kilobytes (KB). Default: 1 MB when compression is enabled; for uncompressed datasets, the default is the full dataset size x dimension. @@ -379,7 +397,7 @@ def h5_chunk_size(self) -> Input: return self._h5_chunk_size @property - def dataset_size_compression_threshold(self) -> Input: + def dataset_size_compression_threshold(self) -> Input[int | GenericDataContainer]: r"""Allows to connect dataset_size_compression_threshold input to the operator. Integer value that defines the minimum dataset size (in bytes) to use h5 native compression Applicable for arrays of floats, doubles and integers. @@ -400,7 +418,7 @@ def dataset_size_compression_threshold(self) -> Input: return self._dataset_size_compression_threshold @property - def h5_native_compression(self) -> Input: + def h5_native_compression(self) -> Input[int | DataTree | GenericDataContainer]: r"""Allows to connect h5_native_compression input to the operator. Integer value / DataTree that defines the h5 native compression used For Integer Input {0: No Compression (default); 1-9: GZIP Compression : 9 provides maximum compression but at the slowest speed.}For DataTree Input {type: None / GZIP / ZSTD; level: GZIP (1-9) / ZSTD (1-20); num_threads: ZSTD (>0)} @@ -421,7 +439,7 @@ def h5_native_compression(self) -> Input: return self._h5_native_compression @property - def export_floats(self) -> Input: + def export_floats(self) -> Input[bool | GenericDataContainer]: r"""Allows to connect export_floats input to the operator. Converts double to float to reduce file size (default is true).If False, nodal results are exported as double precision and elemental results as single precision. @@ -442,7 +460,7 @@ def export_floats(self) -> Input: return self._export_floats @property - def filename(self) -> Input: + def filename(self) -> Input[str]: r"""Allows to connect filename input to the operator. filename of the migrated file @@ -463,7 +481,7 @@ def filename(self) -> Input: return self._filename @property - def comma_separated_list_of_results(self) -> Input: + def comma_separated_list_of_results(self) -> Input[str]: r"""Allows to connect comma_separated_list_of_results input to the operator. list of results (source operator names) separated by semicolons that will be stored. (Example: U;S;EPEL). If empty, all available results will be converted. @@ -484,7 +502,7 @@ def comma_separated_list_of_results(self) -> Input: return self._comma_separated_list_of_results @property - def all_time_sets(self) -> Input: + def all_time_sets(self) -> Input[bool]: r"""Allows to connect all_time_sets input to the operator. Deprecated. Please use filtering workflows instead to select time scoping. Default is false. @@ -505,7 +523,7 @@ def all_time_sets(self) -> Input: return self._all_time_sets @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
streams (result file container) (optional) @@ -526,7 +544,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. if the stream is null then we need to get the file path from the data sources @@ -547,7 +565,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def compression_workflow(self) -> Input: + def compression_workflow(self) -> Input[Workflow | GenericDataContainer]: r"""Allows to connect compression_workflow input to the operator. BETA Option: Applies input compression workflow. @@ -568,7 +586,7 @@ def compression_workflow(self) -> Input: return self._compression_workflow @property - def filtering_workflow(self) -> Input: + def filtering_workflow(self) -> Input[Workflow | GenericDataContainer]: r"""Allows to connect filtering_workflow input to the operator. Applies input filtering workflow. @@ -603,11 +621,13 @@ class OutputsMigrateToH5Dpf(_Outputs): def __init__(self, op: Operator): super().__init__(migrate_to_h5dpf._spec().outputs, op) - self._migrated_file = Output(migrate_to_h5dpf._spec().output_pin(0), 0, op) + self._migrated_file: Output[DataSources] = Output( + migrate_to_h5dpf._spec().output_pin(0), 0, op + ) self._outputs.append(self._migrated_file) @property - def migrated_file(self) -> Output: + def migrated_file(self) -> Output[DataSources]: r"""Allows to get migrated_file output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/modal_acceleration.py b/src/ansys/dpf/core/operators/result/modal_acceleration.py index 15c3dc0bdca..f8155e83b12 100644 --- a/src/ansys/dpf/core/operators/result/modal_acceleration.py +++ b/src/ansys/dpf/core/operators/result/modal_acceleration.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class modal_acceleration(Operator): r"""Read/compute modal acceleration by calling the readers defined by the @@ -251,29 +262,37 @@ class InputsModalAcceleration(_Inputs): def __init__(self, op: Operator): super().__init__(modal_acceleration._spec().inputs, op) - self._time_scoping = Input(modal_acceleration._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + modal_acceleration._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(modal_acceleration._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + modal_acceleration._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( modal_acceleration._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = 
Input( + self._streams_container: Input[StreamsContainer] = Input( modal_acceleration._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(modal_acceleration._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + modal_acceleration._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( modal_acceleration._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(modal_acceleration._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + modal_acceleration._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -294,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -315,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -336,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -357,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -378,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -399,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -434,11 +453,13 @@ class OutputsModalAcceleration(_Outputs): def __init__(self, op: Operator): super().__init__(modal_acceleration._spec().outputs, op) - self._fields_container = Output(modal_acceleration._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + modal_acceleration._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/modal_basis.py b/src/ansys/dpf/core/operators/result/modal_basis.py index d9646b6cf99..bb071771be6 100644 --- a/src/ansys/dpf/core/operators/result/modal_basis.py +++ b/src/ansys/dpf/core/operators/result/modal_basis.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class modal_basis(Operator): r"""Read/compute modal basis by calling the readers defined by the @@ -251,23 +262,37 @@ class InputsModalBasis(_Inputs): def __init__(self, op: Operator): super().__init__(modal_basis._spec().inputs, op) - self._time_scoping = Input(modal_basis._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + modal_basis._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(modal_basis._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + modal_basis._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(modal_basis._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + modal_basis._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(modal_basis._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + modal_basis._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(modal_basis._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + modal_basis._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(modal_basis._spec().input_pin(5), 5, op, -1) + 
self._bool_rotate_to_global: Input[bool] = Input( + modal_basis._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(modal_basis._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + modal_basis._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -288,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -309,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -330,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -351,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -372,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -393,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -428,11 +453,13 @@ class OutputsModalBasis(_Outputs): def __init__(self, op: Operator): super().__init__(modal_basis._spec().outputs, op) - self._fields_container = Output(modal_basis._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + modal_basis._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/modal_coordinate.py b/src/ansys/dpf/core/operators/result/modal_coordinate.py index e11093a8cb2..49e0cb944c7 100644 --- a/src/ansys/dpf/core/operators/result/modal_coordinate.py +++ b/src/ansys/dpf/core/operators/result/modal_coordinate.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.core import errors @@ -15,6 +16,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class modal_coordinate(Operator): r"""Read/compute modal coordinate by calling the readers defined by the @@ -258,27 +269,37 @@ class InputsModalCoordinate(_Inputs): def __init__(self, op: Operator): super().__init__(modal_coordinate._spec().inputs, op) - self._time_scoping = Input(modal_coordinate._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + modal_coordinate._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(modal_coordinate._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + modal_coordinate._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(modal_coordinate._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + modal_coordinate._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( modal_coordinate._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(modal_coordinate._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + modal_coordinate._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( modal_coordinate._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(modal_coordinate._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + modal_coordinate._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect 
time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -299,7 +320,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -320,7 +341,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -341,7 +362,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -362,7 +383,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -383,7 +404,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -404,7 +425,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -439,11 +460,13 @@ class OutputsModalCoordinate(_Outputs): def __init__(self, op: Operator): super().__init__(modal_coordinate._spec().outputs, op) - self._fields_container = Output(modal_coordinate._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + modal_coordinate._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/modal_velocity.py b/src/ansys/dpf/core/operators/result/modal_velocity.py index 04d290be152..262ab742fc4 100644 --- a/src/ansys/dpf/core/operators/result/modal_velocity.py +++ b/src/ansys/dpf/core/operators/result/modal_velocity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class modal_velocity(Operator): r"""Read/compute modal velocity by calling the readers defined by the @@ -251,25 +262,37 @@ class InputsModalVelocity(_Inputs): def __init__(self, op: Operator): super().__init__(modal_velocity._spec().inputs, op) - self._time_scoping = Input(modal_velocity._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + modal_velocity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(modal_velocity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + modal_velocity._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(modal_velocity._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + modal_velocity._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(modal_velocity._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + modal_velocity._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(modal_velocity._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + modal_velocity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( modal_velocity._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(modal_velocity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + modal_velocity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | 
int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -290,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -311,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -332,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -353,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -374,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -395,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -430,11 +453,13 @@ class OutputsModalVelocity(_Outputs): def __init__(self, op: Operator): super().__init__(modal_velocity._spec().outputs, op) - self._fields_container = Output(modal_velocity._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + modal_velocity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nmisc.py b/src/ansys/dpf/core/operators/result/nmisc.py index d33fa1462c6..95af3b571cf 100644 --- a/src/ansys/dpf/core/operators/result/nmisc.py +++ b/src/ansys/dpf/core/operators/result/nmisc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.core import errors @@ -15,6 +16,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nmisc(Operator): r"""Read/compute elemental non summable miscellaneous data by calling the @@ -358,35 +369,55 @@ class InputsNmisc(_Inputs): def __init__(self, op: Operator): super().__init__(nmisc._spec().inputs, op) - self._time_scoping = Input(nmisc._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + nmisc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nmisc._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + nmisc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nmisc._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + nmisc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nmisc._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + nmisc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nmisc._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + nmisc._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(nmisc._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + nmisc._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nmisc._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nmisc._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._item_index = Input(nmisc._spec().input_pin(10), 10, op, -1) + self._item_index: Input[int] = Input(nmisc._spec().input_pin(10), 10, op, -1) self._inputs.append(self._item_index) - self._num_components = 
Input(nmisc._spec().input_pin(11), 11, op, -1) + self._num_components: Input[int] = Input( + nmisc._spec().input_pin(11), 11, op, -1 + ) self._inputs.append(self._num_components) - self._read_cyclic = Input(nmisc._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input(nmisc._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(nmisc._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( + nmisc._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input(nmisc._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( + nmisc._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(nmisc._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input(nmisc._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -407,7 +438,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -428,7 +459,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -449,7 +480,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -470,7 +501,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
result file path container, used if no streams are set @@ -491,7 +522,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -512,7 +543,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -533,7 +564,7 @@ def mesh(self) -> Input: return self._mesh @property - def item_index(self) -> Input: + def item_index(self) -> Input[int]: r"""Allows to connect item_index input to the operator. Index of requested item. @@ -554,7 +585,7 @@ def item_index(self) -> Input: return self._item_index @property - def num_components(self) -> Input: + def num_components(self) -> Input[int]: r"""Allows to connect num_components input to the operator. Number of components for the requested item. @@ -575,7 +606,7 @@ def num_components(self) -> Input: return self._num_components @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -596,7 +627,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -617,7 +648,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -638,7 +669,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. 
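# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): how the typed nmisc pins annotated
# above can be driven. "model.rst" and the item index are placeholder values,
# not taken from this changeset; the call pattern mirrors the generated
# operator docstrings.
from ansys.dpf import core as dpf

ds = dpf.DataSources("model.rst")        # hypothetical result file
op = dpf.operators.result.nmisc()
op.inputs.data_sources.connect(ds)       # Input[DataSources]
op.inputs.time_scoping.connect(-1)       # Input[Scoping | int | float | Field]; -1 = all time/freq sets
op.inputs.item_index.connect(1)          # Input[int]: index of the requested NMISC item
fields = op.outputs.fields_container()   # Output[FieldsContainer] evaluates to a FieldsContainer
print(fields)
# ---------------------------------------------------------------------------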
@@ -673,11 +704,13 @@ class OutputsNmisc(_Outputs): def __init__(self, op: Operator): super().__init__(nmisc._spec().outputs, op) - self._fields_container = Output(nmisc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + nmisc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_force.py b/src/ansys/dpf/core/operators/result/nodal_force.py index 7a991c14365..0e59f5afeae 100644 --- a/src/ansys/dpf/core/operators/result/nodal_force.py +++ b/src/ansys/dpf/core/operators/result/nodal_force.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_force(Operator): r"""Read/compute nodal forces by calling the readers defined by the @@ -251,23 +262,37 @@ class InputsNodalForce(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_force._spec().inputs, op) - self._time_scoping = Input(nodal_force._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + nodal_force._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_force._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + nodal_force._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_force._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + nodal_force._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_force._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + nodal_force._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_force._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + nodal_force._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(nodal_force._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + nodal_force._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_force._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_force._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -288,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -309,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -330,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -351,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -372,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -393,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -428,11 +453,13 @@ class OutputsNodalForce(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_force._spec().outputs, op) - self._fields_container = Output(nodal_force._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + nodal_force._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_moment.py b/src/ansys/dpf/core/operators/result/nodal_moment.py index 90fafe634a9..f887ba98f59 100644 --- a/src/ansys/dpf/core/operators/result/nodal_moment.py +++ b/src/ansys/dpf/core/operators/result/nodal_moment.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_moment(Operator): r"""Read/compute nodal moment by calling the readers defined by the @@ -251,25 +262,37 @@ class InputsNodalMoment(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_moment._spec().inputs, op) - self._time_scoping = Input(nodal_moment._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + nodal_moment._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_moment._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + nodal_moment._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_moment._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + nodal_moment._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_moment._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + nodal_moment._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_moment._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + nodal_moment._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_moment._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_moment._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_moment._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input 
to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -290,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -311,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -332,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -353,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -374,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -395,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -430,11 +453,13 @@ class OutputsNodalMoment(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_moment._spec().outputs, op) - self._fields_container = Output(nodal_moment._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + nodal_moment._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotation.py b/src/ansys/dpf/core/operators/result/nodal_rotation.py index 61f90b076ac..bb365aa3b60 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotation.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotation.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotation(Operator): r"""Read/compute nodal rotation by calling the readers defined by the @@ -251,25 +262,37 @@ class InputsNodalRotation(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotation._spec().inputs, op) - self._time_scoping = Input(nodal_rotation._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + nodal_rotation._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_rotation._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + nodal_rotation._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_rotation._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + nodal_rotation._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_rotation._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + nodal_rotation._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_rotation._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + nodal_rotation._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotation._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_rotation._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_rotation._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | 
Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -290,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -311,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -332,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -353,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -374,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -395,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -430,11 +453,13 @@ class OutputsNodalRotation(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotation._spec().outputs, op) - self._fields_container = Output(nodal_rotation._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + nodal_rotation._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotation_X.py b/src/ansys/dpf/core/operators/result/nodal_rotation_X.py index 45038a90e8a..7a6441a4c85 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotation_X.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotation_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotation_X(Operator): r"""Read/compute nodal rotation X component of the vector (1st component) by @@ -267,29 +278,41 @@ class InputsNodalRotationX(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotation_X._spec().inputs, op) - self._time_scoping = Input(nodal_rotation_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + nodal_rotation_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_rotation_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + nodal_rotation_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_rotation_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + nodal_rotation_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( nodal_rotation_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_rotation_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + nodal_rotation_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotation_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_rotation_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_rotation_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(nodal_rotation_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: 
Input[int] = Input( + nodal_rotation_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -310,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -331,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -352,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -373,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -394,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -415,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -436,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -471,11 +494,13 @@ class OutputsNodalRotationX(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotation_X._spec().outputs, op) - self._fields_container = Output(nodal_rotation_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + nodal_rotation_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotation_Y.py b/src/ansys/dpf/core/operators/result/nodal_rotation_Y.py index dd7c8f4d681..906908935ca 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotation_Y.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotation_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotation_Y(Operator): r"""Read/compute nodal rotation Y component of the vector (2nd component) by @@ -267,29 +278,41 @@ class InputsNodalRotationY(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotation_Y._spec().inputs, op) - self._time_scoping = Input(nodal_rotation_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + nodal_rotation_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_rotation_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + nodal_rotation_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_rotation_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + nodal_rotation_Y._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( nodal_rotation_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_rotation_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + nodal_rotation_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotation_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_rotation_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_rotation_Y._spec().input_pin(7), 7, op, -1 + ) 
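# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): because pins are now declared as
# Input[int], Input[bool], and so on, a static checker such as mypy can reject
# a mismatched connection before the operator ever runs. Values follow the pin
# docstrings; the commented-out line is the one a checker would flag.
from ansys.dpf import core as dpf

op = dpf.operators.result.nodal_rotation_Y()
op.inputs.read_cyclic.connect(2)               # Input[int]; 2 = perform cyclic expansion
op.inputs.bool_rotate_to_global.connect(True)  # Input[bool]; rotate to the global coordinate system
# op.inputs.read_cyclic.connect("expand")      # rejected by the checker: str is not an int
# ---------------------------------------------------------------------------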
self._inputs.append(self._mesh) - self._read_cyclic = Input(nodal_rotation_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + nodal_rotation_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -310,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -331,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -352,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -373,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -394,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -415,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -436,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -471,11 +494,13 @@ class OutputsNodalRotationY(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotation_Y._spec().outputs, op) - self._fields_container = Output(nodal_rotation_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + nodal_rotation_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotation_Z.py b/src/ansys/dpf/core/operators/result/nodal_rotation_Z.py index fc9c3cff0ab..f7d048c65b9 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotation_Z.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotation_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotation_Z(Operator): r"""Read/compute nodal rotation Z component of the vector (3rd component) by @@ -267,29 +278,41 @@ class InputsNodalRotationZ(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotation_Z._spec().inputs, op) - self._time_scoping = Input(nodal_rotation_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + nodal_rotation_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_rotation_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + nodal_rotation_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_rotation_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + nodal_rotation_Z._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( nodal_rotation_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_rotation_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + nodal_rotation_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotation_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_rotation_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_rotation_Z._spec().input_pin(7), 7, op, -1 + ) 
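# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): the guarded-import pattern these
# generated modules rely on, reduced to a standalone example.
# typing.TYPE_CHECKING is False at runtime, so the guarded imports are only
# resolved by static analyzers, and postponed annotation evaluation keeps the
# annotations from being executed. "Decimal" merely stands in for Field,
# FieldsContainer, Scoping, and the other annotation-only imports.
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from decimal import Decimal  # seen by type checkers only; no runtime import


class Holder:
    """Minimal stand-in for an *_Inputs class with a typed pin attribute."""

    def __init__(self) -> None:
        # The annotation is never evaluated at runtime, so loading this module
        # does not require the guarded import.
        self.value: Decimal | None = None
# ---------------------------------------------------------------------------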
self._inputs.append(self._mesh) - self._read_cyclic = Input(nodal_rotation_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + nodal_rotation_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -310,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -331,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -352,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -373,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -394,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -415,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -436,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -471,11 +494,13 @@ class OutputsNodalRotationZ(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotation_Z._spec().outputs, op) - self._fields_container = Output(nodal_rotation_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + nodal_rotation_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration.py b/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration.py index 2d3a66be7f9..4756b2bf912 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotational_acceleration(Operator): r"""Read/compute nodal rotational acceleration by calling the readers @@ -251,37 +262,37 @@ class InputsNodalRotationalAcceleration(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_acceleration._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( nodal_rotational_acceleration._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( nodal_rotational_acceleration._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_rotational_acceleration._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( nodal_rotational_acceleration._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( nodal_rotational_acceleration._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotational_acceleration._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( nodal_rotational_acceleration._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> 
Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -302,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -323,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -344,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -365,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -386,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -407,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -442,13 +453,13 @@ class OutputsNodalRotationalAcceleration(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_acceleration._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( nodal_rotational_acceleration._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_X.py b/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_X.py index 5c2adc6481b..7dececef4f4 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_X.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotational_acceleration_X(Operator): r"""Read/compute nodal rotational acceleration X component of the vector @@ -267,41 +278,41 @@ class InputsNodalRotationalAccelerationX(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_acceleration_X._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( nodal_rotational_acceleration_X._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( nodal_rotational_acceleration_X._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_rotational_acceleration_X._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( nodal_rotational_acceleration_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( nodal_rotational_acceleration_X._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotational_acceleration_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( nodal_rotational_acceleration_X._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( nodal_rotational_acceleration_X._spec().input_pin(14), 14, op, -1 ) 
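# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): driving the mesh_scoping union pin
# (Input[ScopingsContainer | Scoping]) with a plain Scoping and consuming the
# typed output. The node IDs and the result file path are placeholders.
from ansys.dpf import core as dpf

ds = dpf.DataSources("model.rst")                                 # hypothetical result file
nodes = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)  # restrict the output to these nodes

op = dpf.operators.result.nodal_rotational_acceleration_X()
op.inputs.data_sources.connect(ds)     # Input[DataSources]
op.inputs.mesh_scoping.connect(nodes)  # Scoping side of the union; a ScopingsContainer
                                       # would instead split the result into domains
fc = op.outputs.fields_container()     # statically known to be a FieldsContainer
first_field = fc[0]                    # so editors and checkers know fc supports indexing
# ---------------------------------------------------------------------------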
self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -322,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -343,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -364,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -385,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -406,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -427,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -448,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -483,13 +494,13 @@ class OutputsNodalRotationalAccelerationX(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_acceleration_X._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( nodal_rotational_acceleration_X._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_Y.py b/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_Y.py index 06509f65db8..6835dd1a02f 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_Y.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotational_acceleration_Y(Operator): r"""Read/compute nodal rotational acceleration Y component of the vector @@ -267,41 +278,41 @@ class InputsNodalRotationalAccelerationY(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_acceleration_Y._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( nodal_rotational_acceleration_Y._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( nodal_rotational_acceleration_Y._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_rotational_acceleration_Y._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( nodal_rotational_acceleration_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( nodal_rotational_acceleration_Y._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotational_acceleration_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( nodal_rotational_acceleration_Y._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) - self._read_cyclic = Input( + self._read_cyclic: 
Input[int] = Input( nodal_rotational_acceleration_Y._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -322,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -343,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -364,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -385,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -406,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -427,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -448,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -483,13 +494,13 @@ class OutputsNodalRotationalAccelerationY(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_acceleration_Y._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( nodal_rotational_acceleration_Y._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_Z.py b/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_Z.py index 914c3a430ae..1ee4e319d39 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_Z.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotational_acceleration_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotational_acceleration_Z(Operator): r"""Read/compute nodal rotational acceleration Z component of the vector @@ -267,41 +278,41 @@ class InputsNodalRotationalAccelerationZ(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_acceleration_Z._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( nodal_rotational_acceleration_Z._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( nodal_rotational_acceleration_Z._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_rotational_acceleration_Z._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( nodal_rotational_acceleration_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( nodal_rotational_acceleration_Z._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotational_acceleration_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( nodal_rotational_acceleration_Z._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) - self._read_cyclic = Input( + self._read_cyclic: 
Input[int] = Input( nodal_rotational_acceleration_Z._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -322,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -343,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -364,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -385,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -406,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -427,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -448,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -483,13 +494,13 @@ class OutputsNodalRotationalAccelerationZ(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_acceleration_Z._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( nodal_rotational_acceleration_Z._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotational_velocity.py b/src/ansys/dpf/core/operators/result/nodal_rotational_velocity.py index b0a7003983c..6ee6a380439 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotational_velocity.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotational_velocity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotational_velocity(Operator): r"""Read/compute nodal rotational velocity by calling the readers defined by @@ -251,35 +262,37 @@ class InputsNodalRotationalVelocity(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_velocity._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( nodal_rotational_velocity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( nodal_rotational_velocity._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_rotational_velocity._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( nodal_rotational_velocity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( nodal_rotational_velocity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotational_velocity._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_rotational_velocity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_rotational_velocity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> 
Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
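# --- Editor's note: illustrative sketch, not part of the generated diff. ---
# The time_scoping docstring above lists several accepted Python types, which
# the new Input[Scoping | int | float | Field] annotation now spells out.
# Assumes a DPF server; values are placeholders. Any one of these works:
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.nodal_rotational_velocity()
op.inputs.time_scoping.connect(2)      # int: a time/freq set id
op.inputs.time_scoping.connect(0.01)   # float: a time/freq value
op.inputs.time_scoping.connect(        # Scoping of step ids, per the docstring
    dpf.Scoping(ids=[1, 2], location="TimeFreq_steps")
)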
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsNodalRotationalVelocity(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_velocity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( nodal_rotational_velocity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_X.py b/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_X.py index 3634047ab02..ef79e09b8fc 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_X.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotational_velocity_X(Operator): r"""Read/compute nodal rotational velocity X component of the vector (1st @@ -267,39 +278,41 @@ class InputsNodalRotationalVelocityX(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_velocity_X._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( nodal_rotational_velocity_X._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( nodal_rotational_velocity_X._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_rotational_velocity_X._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( nodal_rotational_velocity_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( nodal_rotational_velocity_X._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotational_velocity_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_rotational_velocity_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_rotational_velocity_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( nodal_rotational_velocity_X._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) 
@property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -320,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -341,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -362,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -383,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -404,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -425,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -446,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
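# --- Editor's note: sketch for the read_cyclic pin, not part of the diff. ---
# read_cyclic is now typed Input[int]; per the pin description that follows,
# 0 ignores cyclic symmetry, 1 reads the sector (default), 2 expands, and
# 3 expands and merges stages. Assumes a DPF server.
from ansys.dpf.core import operators as ops

op = ops.result.nodal_rotational_velocity_X()
op.inputs.read_cyclic.connect(3)  # full cyclic expansion with merged stages
op.inputs.read_cyclic(1)          # Input.__call__ is shorthand for connect()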
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -481,13 +494,13 @@ class OutputsNodalRotationalVelocityX(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_velocity_X._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( nodal_rotational_velocity_X._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_Y.py b/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_Y.py index de3f70c575a..66ad24d1b05 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_Y.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotational_velocity_Y(Operator): r"""Read/compute nodal rotational velocity Y component of the vector (2nd @@ -267,39 +278,41 @@ class InputsNodalRotationalVelocityY(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_velocity_Y._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( nodal_rotational_velocity_Y._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( nodal_rotational_velocity_Y._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_rotational_velocity_Y._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( nodal_rotational_velocity_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( nodal_rotational_velocity_Y._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotational_velocity_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_rotational_velocity_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_rotational_velocity_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input( + self._read_cyclic: Input[int] 
= Input( nodal_rotational_velocity_Y._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -320,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -341,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -362,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -383,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -404,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -425,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -446,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -481,13 +494,13 @@ class OutputsNodalRotationalVelocityY(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_velocity_Y._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( nodal_rotational_velocity_Y._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_Z.py b/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_Z.py index 5c7b0f4d26a..31595978db0 100644 --- a/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_Z.py +++ b/src/ansys/dpf/core/operators/result/nodal_rotational_velocity_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class nodal_rotational_velocity_Z(Operator): r"""Read/compute nodal rotational velocity Z component of the vector (3rd @@ -267,39 +278,41 @@ class InputsNodalRotationalVelocityZ(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_velocity_Z._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( nodal_rotational_velocity_Z._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( nodal_rotational_velocity_Z._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( nodal_rotational_velocity_Z._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( nodal_rotational_velocity_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( nodal_rotational_velocity_Z._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( nodal_rotational_velocity_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_rotational_velocity_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + nodal_rotational_velocity_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input( + self._read_cyclic: Input[int] 
= Input( nodal_rotational_velocity_Z._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -320,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -341,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -362,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -383,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -404,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -425,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -446,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
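# --- Editor's note: sketch of what the Input[...] parameters enable. ---
# With the pins parametrized, a static checker such as mypy or pyright can flag
# mismatched connections at analysis time; runtime behaviour is unchanged.
from ansys.dpf.core import operators as ops

op = ops.result.nodal_rotational_velocity_Z()
op.inputs.read_cyclic.connect(1)        # OK: int matches Input[int]
# op.inputs.read_cyclic.connect("one")  # reported by the checker: str is not int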
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -481,13 +494,13 @@ class OutputsNodalRotationalVelocityZ(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_rotational_velocity_Z._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( nodal_rotational_velocity_Z._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/nodal_to_global.py b/src/ansys/dpf/core/operators/result/nodal_to_global.py index afd9ae6ea47..262fc48611b 100644 --- a/src/ansys/dpf/core/operators/result/nodal_to_global.py +++ b/src/ansys/dpf/core/operators/result/nodal_to_global.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class nodal_to_global(Operator): r"""Rotates nodal elemental results to global coordinate system @@ -161,13 +165,17 @@ class InputsNodalToGlobal(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_to_global._spec().inputs, op) - self._fieldA = Input(nodal_to_global._spec().input_pin(0), 0, op, -1) + self._fieldA: Input[Field] = Input( + nodal_to_global._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fieldA) - self._fieldB = Input(nodal_to_global._spec().input_pin(1), 1, op, -1) + self._fieldB: Input[Field] = Input( + nodal_to_global._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fieldB) @property - def fieldA(self) -> Input: + def fieldA(self) -> Input[Field]: r"""Allows to connect fieldA input to the operator. Vector or tensor field that must be rotated, expressed in nodal coordinate system. @@ -188,7 +196,7 @@ def fieldA(self) -> Input: return self._fieldA @property - def fieldB(self) -> Input: + def fieldB(self) -> Input[Field]: r"""Allows to connect fieldB input to the operator. Nodal euler angles defined from a result file. Those must be the rotations from Nodal to Global. 
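# --- Editor's note: illustrative chaining sketch, not taken from the diff. ---
# The fieldA/fieldB descriptions above suggest the typical pattern: read nodal
# euler angles with result.node_orientations and use them to rotate a field
# left in the nodal coordinate system. Path is hypothetical; assumes a server.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

ds = dpf.DataSources("model.rst")

vel = ops.result.nodal_rotational_velocity()
vel.inputs.data_sources.connect(ds)
vel.inputs.bool_rotate_to_global.connect(False)     # keep the nodal CS (pin 5)
nodal_field = vel.outputs.fields_container()[0]     # Field in nodal CS

angles = ops.result.node_orientations()
angles.inputs.data_sources.connect(ds)
euler_field = angles.outputs.fields_container()[0]  # nodal euler angles

rot = ops.result.nodal_to_global()
rot.inputs.fieldA.connect(nodal_field)              # Input[Field]
rot.inputs.fieldB.connect(euler_field)              # Input[Field]
rotated = rot.outputs.field()                       # Output[Field], rotated field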
@@ -223,11 +231,13 @@ class OutputsNodalToGlobal(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_to_global._spec().outputs, op) - self._field = Output(nodal_to_global._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + nodal_to_global._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Rotated field diff --git a/src/ansys/dpf/core/operators/result/node_orientations.py b/src/ansys/dpf/core/operators/result/node_orientations.py index 1bf1599cf70..2a966ce1d87 100644 --- a/src/ansys/dpf/core/operators/result/node_orientations.py +++ b/src/ansys/dpf/core/operators/result/node_orientations.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class node_orientations(Operator): r"""Read/compute node euler angles by calling the readers defined by the @@ -251,29 +262,37 @@ class InputsNodeOrientations(_Inputs): def __init__(self, op: Operator): super().__init__(node_orientations._spec().inputs, op) - self._time_scoping = Input(node_orientations._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + node_orientations._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(node_orientations._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + node_orientations._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( node_orientations._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( node_orientations._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(node_orientations._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + node_orientations._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( node_orientations._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(node_orientations._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + node_orientations._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -294,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -315,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -336,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -357,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -378,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -399,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
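# --- Editor's note: sketch for the mesh_scoping pin described above. ---
# mesh_scoping is typed Input[ScopingsContainer | Scoping]; a plain nodal
# Scoping restricts the output to the listed node IDs. IDs and the file path
# are hypothetical; assumes a DPF server.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.node_orientations()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))
op.inputs.mesh_scoping.connect(
    dpf.Scoping(ids=[10, 11, 12], location=dpf.locations.nodal)
)
fc = op.outputs.fields_container()  # Output[FieldsContainer], scoped to those nodes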
prevents from reading the mesh in the result files @@ -434,11 +453,13 @@ class OutputsNodeOrientations(_Outputs): def __init__(self, op: Operator): super().__init__(node_orientations._spec().outputs, op) - self._fields_container = Output(node_orientations._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + node_orientations._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/node_orientations_X.py b/src/ansys/dpf/core/operators/result/node_orientations_X.py index 0950c72dc45..1310b1f980f 100644 --- a/src/ansys/dpf/core/operators/result/node_orientations_X.py +++ b/src/ansys/dpf/core/operators/result/node_orientations_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class node_orientations_X(Operator): r"""Read/compute node euler angles X component of the vector (1st component) @@ -267,31 +278,41 @@ class InputsNodeOrientationsX(_Inputs): def __init__(self, op: Operator): super().__init__(node_orientations_X._spec().inputs, op) - self._time_scoping = Input(node_orientations_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + node_orientations_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(node_orientations_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + node_orientations_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( node_orientations_X._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( node_orientations_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(node_orientations_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + node_orientations_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( node_orientations_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(node_orientations_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + node_orientations_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(node_orientations_X._spec().input_pin(14), 14, op, -1) + 
self._read_cyclic: Input[int] = Input( + node_orientations_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -312,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -333,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -354,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -375,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -396,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -417,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -438,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -473,13 +494,13 @@ class OutputsNodeOrientationsX(_Outputs): def __init__(self, op: Operator): super().__init__(node_orientations_X._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( node_orientations_X._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/node_orientations_Y.py b/src/ansys/dpf/core/operators/result/node_orientations_Y.py index 234a1f4ee35..0f23ab5c80d 100644 --- a/src/ansys/dpf/core/operators/result/node_orientations_Y.py +++ b/src/ansys/dpf/core/operators/result/node_orientations_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class node_orientations_Y(Operator): r"""Read/compute node euler angles Y component of the vector (2nd component) @@ -267,31 +278,41 @@ class InputsNodeOrientationsY(_Inputs): def __init__(self, op: Operator): super().__init__(node_orientations_Y._spec().inputs, op) - self._time_scoping = Input(node_orientations_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + node_orientations_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(node_orientations_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + node_orientations_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( node_orientations_Y._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( node_orientations_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(node_orientations_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + node_orientations_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( node_orientations_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(node_orientations_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + node_orientations_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = 
Input(node_orientations_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + node_orientations_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -312,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -333,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -354,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -375,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -396,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -417,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -438,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -473,13 +494,13 @@ class OutputsNodeOrientationsY(_Outputs): def __init__(self, op: Operator): super().__init__(node_orientations_Y._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( node_orientations_Y._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/node_orientations_Z.py b/src/ansys/dpf/core/operators/result/node_orientations_Z.py index fc156a585a0..cd452562ee4 100644 --- a/src/ansys/dpf/core/operators/result/node_orientations_Z.py +++ b/src/ansys/dpf/core/operators/result/node_orientations_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class node_orientations_Z(Operator): r"""Read/compute node euler angles Z component of the vector (3rd component) @@ -267,31 +278,41 @@ class InputsNodeOrientationsZ(_Inputs): def __init__(self, op: Operator): super().__init__(node_orientations_Z._spec().inputs, op) - self._time_scoping = Input(node_orientations_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + node_orientations_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(node_orientations_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + node_orientations_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( node_orientations_Z._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( node_orientations_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(node_orientations_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + node_orientations_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( node_orientations_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(node_orientations_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + node_orientations_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = 
Input(node_orientations_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + node_orientations_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -312,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -333,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -354,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -375,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -396,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -417,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -438,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -473,13 +494,13 @@ class OutputsNodeOrientationsZ(_Outputs): def __init__(self, op: Operator): super().__init__(node_orientations_Z._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( node_orientations_Z._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/normal_contact_force.py b/src/ansys/dpf/core/operators/result/normal_contact_force.py index 4d1b002bc9f..b4c95734e8e 100644 --- a/src/ansys/dpf/core/operators/result/normal_contact_force.py +++ b/src/ansys/dpf/core/operators/result/normal_contact_force.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class normal_contact_force(Operator): r"""Read/compute normal contact force by calling the readers defined by the @@ -251,29 +262,37 @@ class InputsNormalContactForce(_Inputs): def __init__(self, op: Operator): super().__init__(normal_contact_force._spec().inputs, op) - self._time_scoping = Input(normal_contact_force._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + normal_contact_force._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(normal_contact_force._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + normal_contact_force._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( normal_contact_force._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( normal_contact_force._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(normal_contact_force._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + normal_contact_force._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( normal_contact_force._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(normal_contact_force._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + normal_contact_force._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - 
def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -294,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -315,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -336,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -357,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -378,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -399,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -434,13 +453,13 @@ class OutputsNormalContactForce(_Outputs): def __init__(self, op: Operator): super().__init__(normal_contact_force._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( normal_contact_force._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/normal_contact_moment.py b/src/ansys/dpf/core/operators/result/normal_contact_moment.py index 4eccfdd7cae..808bd28cc74 100644 --- a/src/ansys/dpf/core/operators/result/normal_contact_moment.py +++ b/src/ansys/dpf/core/operators/result/normal_contact_moment.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class normal_contact_moment(Operator): r"""Read/compute normal contact moment by calling the readers defined by the @@ -251,35 +262,37 @@ class InputsNormalContactMoment(_Inputs): def __init__(self, op: Operator): super().__init__(normal_contact_moment._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( normal_contact_moment._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( normal_contact_moment._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( normal_contact_moment._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( normal_contact_moment._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( normal_contact_moment._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( normal_contact_moment._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(normal_contact_moment._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + normal_contact_moment._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsNormalContactMoment(_Outputs): def __init__(self, op: Operator): super().__init__(normal_contact_moment._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( normal_contact_moment._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/num_surface_status_changes.py b/src/ansys/dpf/core/operators/result/num_surface_status_changes.py index 8bcc99ed6ea..182449a0ef4 100644 --- a/src/ansys/dpf/core/operators/result/num_surface_status_changes.py +++ b/src/ansys/dpf/core/operators/result/num_surface_status_changes.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class num_surface_status_changes(Operator): r"""Read/compute element total number of contact status changes during @@ -619,69 +630,73 @@ class InputsNumSurfaceStatusChanges(_Inputs): def __init__(self, op: Operator): super().__init__(num_surface_status_changes._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( num_surface_status_changes._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( num_surface_status_changes._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( num_surface_status_changes._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( num_surface_status_changes._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( num_surface_status_changes._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( num_surface_status_changes._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(num_surface_status_changes._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + num_surface_status_changes._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( num_surface_status_changes._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - 
self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( num_surface_status_changes._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( num_surface_status_changes._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( num_surface_status_changes._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(num_surface_status_changes._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + num_surface_status_changes._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( num_surface_status_changes._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( num_surface_status_changes._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( num_surface_status_changes._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( num_surface_status_changes._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -702,7 +717,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -723,7 +738,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -744,7 +759,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -765,7 +780,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -786,7 +801,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -807,7 +822,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -828,7 +843,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -849,7 +864,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -870,7 +885,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -891,7 +906,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -912,7 +927,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -933,7 +948,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -954,7 +969,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. 
If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -975,7 +990,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -996,7 +1011,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -1031,13 +1046,13 @@ class OutputsNumSurfaceStatusChanges(_Outputs): def __init__(self, op: Operator): super().__init__(num_surface_status_changes._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( num_surface_status_changes._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/omega.py b/src/ansys/dpf/core/operators/result/omega.py index 0f47574662f..3737a1f0531 100644 --- a/src/ansys/dpf/core/operators/result/omega.py +++ b/src/ansys/dpf/core/operators/result/omega.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class omega(Operator): r"""Read Turbulent Specific Dissipation Rate (omega) by calling the readers @@ -267,25 +278,41 @@ class InputsOmega(_Inputs): def __init__(self, op: Operator): super().__init__(omega._spec().inputs, op) - self._time_scoping = Input(omega._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + omega._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(omega._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + omega._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(omega._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + omega._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(omega._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + omega._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(omega._spec().input_pin(7), 7, 
op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + omega._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(omega._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + omega._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(omega._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + omega._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(omega._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + omega._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -306,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -327,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -348,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -369,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -390,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). 
@@ -411,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -432,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -467,11 +494,13 @@ class OutputsOmega(_Outputs): def __init__(self, op: Operator): super().__init__(omega._spec().outputs, op) - self._fields_container = Output(omega._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + omega._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/part_added_mass.py b/src/ansys/dpf/core/operators/result/part_added_mass.py index 92a3c88f5af..fcfccdb8fa9 100644 --- a/src/ansys/dpf/core/operators/result/part_added_mass.py +++ b/src/ansys/dpf/core/operators/result/part_added_mass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class part_added_mass(Operator): r"""Read Part Added Mass (LSDyna) by calling the readers defined by the @@ -200,17 +208,25 @@ class InputsPartAddedMass(_Inputs): def __init__(self, op: Operator): super().__init__(part_added_mass._spec().inputs, op) - self._streams_container = Input(part_added_mass._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + part_added_mass._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(part_added_mass._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + part_added_mass._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._entity_scoping = Input(part_added_mass._spec().input_pin(6), 6, op, -1) + self._entity_scoping: Input[Scoping] = Input( + part_added_mass._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._entity_scoping) - self._unit_system = Input(part_added_mass._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + part_added_mass._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -231,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -252,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. entity (part for matsum, interface for rcforc) where the result will be scoped @@ -273,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -308,11 +324,13 @@ class OutputsPartAddedMass(_Outputs): def __init__(self, op: Operator): super().__init__(part_added_mass._spec().outputs, op) - self._fields_container = Output(part_added_mass._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + part_added_mass._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/part_eroded_internal_energy.py b/src/ansys/dpf/core/operators/result/part_eroded_internal_energy.py index 9d510ffb098..e2e7988d002 100644 --- a/src/ansys/dpf/core/operators/result/part_eroded_internal_energy.py +++ b/src/ansys/dpf/core/operators/result/part_eroded_internal_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class part_eroded_internal_energy(Operator): r"""Read Part Eroded Internal Energy (LSDyna) by calling the readers defined @@ -200,25 +208,25 @@ class InputsPartErodedInternalEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(part_eroded_internal_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( part_eroded_internal_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( part_eroded_internal_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._entity_scoping = Input( + self._entity_scoping: Input[Scoping] = Input( part_eroded_internal_energy._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._entity_scoping) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( part_eroded_internal_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def 
streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -239,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -260,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. entity (part for matsum, interface for rcforc) where the result will be scoped @@ -281,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -316,13 +324,13 @@ class OutputsPartErodedInternalEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(part_eroded_internal_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( part_eroded_internal_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/part_eroded_kinetic_energy.py b/src/ansys/dpf/core/operators/result/part_eroded_kinetic_energy.py index 6e3f5e2f4f2..3f7412de998 100644 --- a/src/ansys/dpf/core/operators/result/part_eroded_kinetic_energy.py +++ b/src/ansys/dpf/core/operators/result/part_eroded_kinetic_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class part_eroded_kinetic_energy(Operator): r"""Read Part Eroded Kinetic Energy (LSDyna) by calling the readers defined @@ -200,25 +208,25 @@ class InputsPartErodedKineticEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(part_eroded_kinetic_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( part_eroded_kinetic_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( part_eroded_kinetic_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._entity_scoping = Input( + self._entity_scoping: Input[Scoping] = Input( part_eroded_kinetic_energy._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._entity_scoping) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = 
Input( part_eroded_kinetic_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -239,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -260,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. entity (part for matsum, interface for rcforc) where the result will be scoped @@ -281,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -316,13 +324,13 @@ class OutputsPartErodedKineticEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(part_eroded_kinetic_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( part_eroded_kinetic_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/part_hourglass_energy.py b/src/ansys/dpf/core/operators/result/part_hourglass_energy.py index f79f3d0a61b..5527167b209 100644 --- a/src/ansys/dpf/core/operators/result/part_hourglass_energy.py +++ b/src/ansys/dpf/core/operators/result/part_hourglass_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class part_hourglass_energy(Operator): r"""Read Part Hourglass Energy (LSDyna) by calling the readers defined by @@ -200,25 +208,25 @@ class InputsPartHourglassEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(part_hourglass_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( part_hourglass_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( part_hourglass_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._entity_scoping = Input( + self._entity_scoping: Input[Scoping] = Input( part_hourglass_energy._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._entity_scoping) - 
        self._unit_system = Input(
+        self._unit_system: Input[int | str | UnitSystem] = Input(
             part_hourglass_energy._spec().input_pin(50), 50, op, -1
         )
         self._inputs.append(self._unit_system)

     @property
-    def streams_container(self) -> Input:
+    def streams_container(self) -> Input[StreamsContainer]:
         r"""Allows to connect streams_container input to the operator.

         result file container allowed to be kept open to cache data
@@ -239,7 +247,7 @@ def streams_container(self) -> Input:
         return self._streams_container

     @property
-    def data_sources(self) -> Input:
+    def data_sources(self) -> Input[DataSources]:
         r"""Allows to connect data_sources input to the operator.

         result file path container, used if no streams are set
@@ -260,7 +268,7 @@ def data_sources(self) -> Input:
         return self._data_sources

     @property
-    def entity_scoping(self) -> Input:
+    def entity_scoping(self) -> Input[Scoping]:
         r"""Allows to connect entity_scoping input to the operator.

         entity (part for matsum, interface for rcforc) where the result will be scoped
@@ -281,7 +289,7 @@ def entity_scoping(self) -> Input:
         return self._entity_scoping

     @property
-    def unit_system(self) -> Input:
+    def unit_system(self) -> Input[int | str | UnitSystem]:
         r"""Allows to connect unit_system input to the operator.

         (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
@@ -316,13 +324,13 @@ class OutputsPartHourglassEnergy(_Outputs):

     def __init__(self, op: Operator):
         super().__init__(part_hourglass_energy._spec().outputs, op)
-        self._fields_container = Output(
+        self._fields_container: Output[FieldsContainer] = Output(
             part_hourglass_energy._spec().output_pin(0), 0, op
         )
         self._outputs.append(self._fields_container)

     @property
-    def fields_container(self) -> Output:
+    def fields_container(self) -> Output[FieldsContainer]:
         r"""Allows to get fields_container output of the operator

         Returns
diff --git a/src/ansys/dpf/core/operators/result/part_internal_energy.py b/src/ansys/dpf/core/operators/result/part_internal_energy.py
index 338866054ee..8042614ed22 100644
--- a/src/ansys/dpf/core/operators/result/part_internal_energy.py
+++ b/src/ansys/dpf/core/operators/result/part_internal_energy.py
@@ -5,6 +5,7 @@
 """

 from __future__ import annotations
+from typing import TYPE_CHECKING
 from warnings import warn

 from ansys.dpf.core.dpf_operator import Operator
@@ -14,6 +15,13 @@
 from ansys.dpf.core.config import Config
 from ansys.dpf.core.server_types import AnyServerType

+if TYPE_CHECKING:
+    from ansys.dpf.core.data_sources import DataSources
+    from ansys.dpf.core.fields_container import FieldsContainer
+    from ansys.dpf.core.scoping import Scoping
+    from ansys.dpf.core.streams_container import StreamsContainer
+    from ansys.dpf.core.unit_system import UnitSystem
+

 class part_internal_energy(Operator):
     r"""Read Part Internal Energy (LSDyna) by calling the readers defined by the
@@ -200,23 +208,25 @@ class InputsPartInternalEnergy(_Inputs):

     def __init__(self, op: Operator):
         super().__init__(part_internal_energy._spec().inputs, op)
-        self._streams_container = Input(
+        self._streams_container: Input[StreamsContainer] = Input(
             part_internal_energy._spec().input_pin(3), 3, op, -1
         )
         self._inputs.append(self._streams_container)
-        self._data_sources = Input(part_internal_energy._spec().input_pin(4), 4, op, -1)
+        self._data_sources: Input[DataSources] = Input(
+            part_internal_energy._spec().input_pin(4), 4, op, -1
+        )
         self._inputs.append(self._data_sources)
-        self._entity_scoping = Input(
+        self._entity_scoping: Input[Scoping] = Input(
part_internal_energy._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._entity_scoping) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( part_internal_energy._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -237,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -258,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. entity (part for matsum, interface for rcforc) where the result will be scoped @@ -279,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -314,13 +324,13 @@ class OutputsPartInternalEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(part_internal_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( part_internal_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/part_kinetic_energy.py b/src/ansys/dpf/core/operators/result/part_kinetic_energy.py index e96b0eac2e5..68ccdcf92be 100644 --- a/src/ansys/dpf/core/operators/result/part_kinetic_energy.py +++ b/src/ansys/dpf/core/operators/result/part_kinetic_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class part_kinetic_energy(Operator): r"""Read Part Kinetic Energy (LSDyna) by calling the readers defined by the @@ -200,21 +208,25 @@ class InputsPartKineticEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(part_kinetic_energy._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( part_kinetic_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(part_kinetic_energy._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + part_kinetic_energy._spec().input_pin(4), 4, op, -1 + ) 
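A minimal usage sketch of the typed LSDyna part_* pins above (illustrative only, not part of the patch; the result-file path, part ID, and unit-system ID are placeholders, and running it requires a real result file and DPF server):

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    # Placeholder LSDyna result file; substitute a real binout/d3plot path.
    ds = dpf.DataSources(r"path/to/binout")

    op = ops.result.part_kinetic_energy()
    op.inputs.data_sources.connect(ds)                      # Input[DataSources]
    op.inputs.entity_scoping.connect(dpf.Scoping(ids=[1]))  # Input[Scoping]; part ID 1 is illustrative
    op.inputs.unit_system.connect(6)                        # Input[int | str | UnitSystem]; ID, unit string, or UnitSystem instance all fit the annotation

    fc = op.outputs.fields_container()                      # pin 0, now declared as Output[FieldsContainer]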
self._inputs.append(self._data_sources) - self._entity_scoping = Input( + self._entity_scoping: Input[Scoping] = Input( part_kinetic_energy._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._entity_scoping) - self._unit_system = Input(part_kinetic_energy._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + part_kinetic_energy._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -235,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -256,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. entity (part for matsum, interface for rcforc) where the result will be scoped @@ -277,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -312,13 +324,13 @@ class OutputsPartKineticEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(part_kinetic_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( part_kinetic_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/part_momentum.py b/src/ansys/dpf/core/operators/result/part_momentum.py index 1bbd73d8426..273bcc30cd1 100644 --- a/src/ansys/dpf/core/operators/result/part_momentum.py +++ b/src/ansys/dpf/core/operators/result/part_momentum.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class part_momentum(Operator): r"""Read Part Momentum (LSDyna) by calling the readers defined by the @@ -200,17 +208,25 @@ class InputsPartMomentum(_Inputs): def __init__(self, op: Operator): super().__init__(part_momentum._spec().inputs, op) - self._streams_container = Input(part_momentum._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + part_momentum._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = 
Input(part_momentum._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + part_momentum._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._entity_scoping = Input(part_momentum._spec().input_pin(6), 6, op, -1) + self._entity_scoping: Input[Scoping] = Input( + part_momentum._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._entity_scoping) - self._unit_system = Input(part_momentum._spec().input_pin(50), 50, op, -1) + self._unit_system: Input[int | str | UnitSystem] = Input( + part_momentum._spec().input_pin(50), 50, op, -1 + ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -231,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -252,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. entity (part for matsum, interface for rcforc) where the result will be scoped @@ -273,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. (LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -308,11 +324,13 @@ class OutputsPartMomentum(_Outputs): def __init__(self, op: Operator): super().__init__(part_momentum._spec().outputs, op) - self._fields_container = Output(part_momentum._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + part_momentum._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/part_rigid_body_velocity.py b/src/ansys/dpf/core/operators/result/part_rigid_body_velocity.py index 14b6fcaa0f4..260711ea366 100644 --- a/src/ansys/dpf/core/operators/result/part_rigid_body_velocity.py +++ b/src/ansys/dpf/core/operators/result/part_rigid_body_velocity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.unit_system import UnitSystem + class part_rigid_body_velocity(Operator): r"""Read Part Rigid Body Velocity (LSDyna) by calling the readers defined by @@ -200,25 +208,25 @@ class InputsPartRigidBodyVelocity(_Inputs): def __init__(self, op: Operator): super().__init__(part_rigid_body_velocity._spec().inputs, 
op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( part_rigid_body_velocity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( part_rigid_body_velocity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._entity_scoping = Input( + self._entity_scoping: Input[Scoping] = Input( part_rigid_body_velocity._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._entity_scoping) - self._unit_system = Input( + self._unit_system: Input[int | str | UnitSystem] = Input( part_rigid_body_velocity._spec().input_pin(50), 50, op, -1 ) self._inputs.append(self._unit_system) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -239,7 +247,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -260,7 +268,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def entity_scoping(self) -> Input: + def entity_scoping(self) -> Input[Scoping]: r"""Allows to connect entity_scoping input to the operator. entity (part for matsum, interface for rcforc) where the result will be scoped @@ -281,7 +289,7 @@ def entity_scoping(self) -> Input: return self._entity_scoping @property - def unit_system(self) -> Input: + def unit_system(self) -> Input[int | str | UnitSystem]: r"""Allows to connect unit_system input to the operator. 
(LSDyna) Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance @@ -316,13 +324,13 @@ class OutputsPartRigidBodyVelocity(_Outputs): def __init__(self, op: Operator): super().__init__(part_rigid_body_velocity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( part_rigid_body_velocity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_state_variable.py b/src/ansys/dpf/core/operators/result/plastic_state_variable.py index 42c8c842b54..5ab589f762f 100644 --- a/src/ansys/dpf/core/operators/result/plastic_state_variable.py +++ b/src/ansys/dpf/core/operators/result/plastic_state_variable.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_state_variable(Operator): r"""Read/compute element nodal plastic state variable by calling the readers @@ -555,55 +566,57 @@ class InputsPlasticStateVariable(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_state_variable._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( plastic_state_variable._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( plastic_state_variable._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_state_variable._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_state_variable._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( plastic_state_variable._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_state_variable._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_state_variable._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_state_variable._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_state_variable._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - 
self._read_beams = Input( + self._read_beams: Input[bool] = Input( plastic_state_variable._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( plastic_state_variable._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( plastic_state_variable._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( plastic_state_variable._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -624,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -645,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -666,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -687,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -708,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. 
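The wider unions on the result-reader pins (for instance Input[Scoping | int | float | Field] for time_scoping and Input[MeshedRegion | MeshesContainer] for mesh) make the accepted alternatives visible at the call site. A short sketch under assumed inputs (the file path is a placeholder; only one time_scoping form would be connected in practice):

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    op = ops.result.plastic_state_variable()
    op.inputs.data_sources.connect(dpf.DataSources(r"path/to/file.rst"))  # placeholder path

    # time_scoping is Input[Scoping | int | float | Field]; any of these forms matches the annotation:
    op.inputs.time_scoping.connect(-1)                        # int: all time/freq sets
    # op.inputs.time_scoping.connect(0.02)                    # float: a time/freq value
    # op.inputs.time_scoping.connect(dpf.Scoping(ids=[1, 2])) # Scoping: specific set IDs

    op.inputs.requested_location.connect(dpf.locations.elemental)  # Input[str]
    fc = op.outputs.fields_container()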
@@ -729,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -750,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -771,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -792,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -813,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -834,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -869,13 +882,13 @@ class OutputsPlasticStateVariable(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_state_variable._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( plastic_state_variable._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain.py b/src/ansys/dpf/core/operators/result/plastic_strain.py index 7064ce230a8..511ba41ec25 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain(Operator): r"""Read/compute element nodal component plastic strains by calling the @@ -555,37 +566,57 @@ class InputsPlasticStrain(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain._spec().inputs, op) - self._time_scoping = Input(plastic_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + plastic_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + plastic_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + plastic_strain._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + plastic_strain._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + plastic_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + 
plastic_strain._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_beams = Input(plastic_strain._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + plastic_strain._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(plastic_strain._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + plastic_strain._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(plastic_strain._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + plastic_strain._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( plastic_strain._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -606,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -627,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -648,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -669,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -690,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -711,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -732,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -753,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -774,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -795,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -816,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -851,11 +882,13 @@ class OutputsPlasticStrain(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain._spec().outputs, op) - self._fields_container = Output(plastic_strain._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + plastic_strain._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_X.py b/src/ansys/dpf/core/operators/result/plastic_strain_X.py index 399d586f151..b80158d0584 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_X.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_X(Operator): r"""Read/compute element nodal component plastic strains XX normal component @@ -303,35 +314,49 @@ class InputsPlasticStrainX(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_X._spec().inputs, op) - self._time_scoping = Input(plastic_strain_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + plastic_strain_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + plastic_strain_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + plastic_strain_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + plastic_strain_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_strain_X._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + plastic_strain_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(plastic_strain_X._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + plastic_strain_X._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsPlasticStrainX(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_X._spec().outputs, op) - self._fields_container = Output(plastic_strain_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + plastic_strain_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_XY.py b/src/ansys/dpf/core/operators/result/plastic_strain_XY.py index 11e5379fe69..0c8805d2677 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_XY.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_XY.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_XY(Operator): r"""Read/compute element nodal component plastic strains XY shear component @@ -303,37 +314,49 @@ class InputsPlasticStrainXy(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_XY._spec().inputs, op) - self._time_scoping = Input(plastic_strain_XY._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + plastic_strain_XY._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_XY._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + plastic_strain_XY._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_strain_XY._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - 
self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_XY._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_XY._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + plastic_strain_XY._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_XY._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_XY._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_XY._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_strain_XY._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_XY._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + plastic_strain_XY._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(plastic_strain_XY._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + plastic_strain_XY._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsPlasticStrainXy(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_XY._spec().outputs, op) - self._fields_container = Output(plastic_strain_XY._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + plastic_strain_XY._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_XZ.py b/src/ansys/dpf/core/operators/result/plastic_strain_XZ.py index 56528d1aed4..c1a63035f4d 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_XZ.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_XZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_XZ(Operator): r"""Read/compute element nodal component plastic strains XZ shear component @@ -303,37 +314,49 @@ class InputsPlasticStrainXz(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_XZ._spec().inputs, op) - self._time_scoping = Input(plastic_strain_XZ._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + plastic_strain_XZ._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_XZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + plastic_strain_XZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_strain_XZ._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_XZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_XZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + plastic_strain_XZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_XZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_XZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_XZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( 
plastic_strain_XZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_XZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + plastic_strain_XZ._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(plastic_strain_XZ._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + plastic_strain_XZ._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsPlasticStrainXz(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_XZ._spec().outputs, op) - self._fields_container = Output(plastic_strain_XZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + plastic_strain_XZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_Y.py b/src/ansys/dpf/core/operators/result/plastic_strain_Y.py index 3d13e47b6fb..4e261150e35 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_Y.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_Y(Operator): r"""Read/compute element nodal component plastic strains YY normal component @@ -303,35 +314,49 @@ class InputsPlasticStrainY(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_Y._spec().inputs, op) - self._time_scoping = Input(plastic_strain_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + plastic_strain_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + plastic_strain_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + plastic_strain_Y._spec().input_pin(2), 2, op, -1 + ) 
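Because outputs are parameterized as well, a chained workflow documents which DPF type crosses each connection. A sketch chaining a typed FieldsContainer output into a downstream operator (the min/max operator is used only as a convenient consumer for illustration; the path is a placeholder):

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    strain = ops.result.plastic_strain_Y()
    strain.inputs.data_sources.connect(dpf.DataSources(r"path/to/file.rst"))  # placeholder path

    # strain.outputs.fields_container is Output[FieldsContainer]: pin 0 carries a FieldsContainer.
    mm = ops.min_max.min_max_fc()
    mm.inputs.fields_container.connect(strain.outputs.fields_container)  # output-to-input connection

    max_field = mm.outputs.field_max()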
self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + plastic_strain_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_strain_Y._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + plastic_strain_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(plastic_strain_Y._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + plastic_strain_Y._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsPlasticStrainY(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_Y._spec().outputs, op) - self._fields_container = Output(plastic_strain_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + plastic_strain_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_YZ.py b/src/ansys/dpf/core/operators/result/plastic_strain_YZ.py index 2db889954db..235e42f23a4 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_YZ.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_YZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_YZ(Operator): r"""Read/compute element nodal component plastic strains YZ shear component @@ -303,37 +314,49 @@ class InputsPlasticStrainYz(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_YZ._spec().inputs, op) - self._time_scoping = Input(plastic_strain_YZ._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + plastic_strain_YZ._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_YZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + plastic_strain_YZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_strain_YZ._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_YZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_YZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + plastic_strain_YZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_YZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_YZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_YZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( 
plastic_strain_YZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_YZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + plastic_strain_YZ._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(plastic_strain_YZ._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + plastic_strain_YZ._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsPlasticStrainYz(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_YZ._spec().outputs, op) - self._fields_container = Output(plastic_strain_YZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + plastic_strain_YZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_Z.py b/src/ansys/dpf/core/operators/result/plastic_strain_Z.py index 05f74a55996..0ca583132d2 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_Z.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_Z(Operator): r"""Read/compute element nodal component plastic strains ZZ normal component @@ -303,35 +314,49 @@ class InputsPlasticStrainZ(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_Z._spec().inputs, op) - self._time_scoping = Input(plastic_strain_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + plastic_strain_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + plastic_strain_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + plastic_strain_Z._spec().input_pin(2), 2, op, -1 + ) 
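Every module touched by this patch relies on the same pattern visible in the header above: annotation-only imports guarded by typing.TYPE_CHECKING, combined with from __future__ import annotations so the names never need to exist at runtime. A condensed, self-contained sketch of that pattern (the first_field helper is hypothetical, for illustration only):

from __future__ import annotations  # annotations stay as strings at runtime

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated only by static type checkers, never at import time,
    # which keeps import cost flat and avoids circular imports.
    from ansys.dpf.core.fields_container import FieldsContainer


def first_field(fc: FieldsContainer):
    """Illustrative helper (not part of the patch): return the first field."""
    return fc[0]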
self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + plastic_strain_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_strain_Z._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + plastic_strain_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(plastic_strain_Z._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + plastic_strain_Z._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsPlasticStrainZ(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_Z._spec().outputs, op) - self._fields_container = Output(plastic_strain_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + plastic_strain_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_energy_density.py b/src/ansys/dpf/core/operators/result/plastic_strain_energy_density.py index 26d35b3c417..b2ad73ce740 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_energy_density.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_energy_density.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_energy_density(Operator): r"""Read/compute element nodal plastic strain energy density by calling the @@ -555,57 +566,57 @@ class InputsPlasticStrainEnergyDensity(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_energy_density._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( plastic_strain_energy_density._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( plastic_strain_energy_density._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_strain_energy_density._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_energy_density._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( plastic_strain_energy_density._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_energy_density._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( plastic_strain_energy_density._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_strain_energy_density._spec().input_pin(9), 9, op, -1 ) 
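Because Input is now generic over the accepted value type, a static checker can flag mismatched connections on the boolean and integer pins declared just below for this operator. A small sketch (illustrative only, assuming a reachable DPF server):

from ansys.dpf.core import operators as ops

op = ops.result.plastic_strain_energy_density()
op.inputs.split_shells.connect(True)           # OK for a checker: Input[bool]
op.inputs.shell_layer.connect(0)               # OK: Input[int] (0 selects the top layer)
op.inputs.requested_location.connect("Nodal")  # OK: Input[str]

# A checker such as mypy or pyright flags the next call (str is not bool);
# runtime behaviour is unchanged by this patch, only the static diagnosis is new.
op.inputs.split_shells.connect("true")  # type: ignore[arg-type]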
self._inputs.append(self._requested_location) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( plastic_strain_energy_density._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( plastic_strain_energy_density._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( plastic_strain_energy_density._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( plastic_strain_energy_density._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -626,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -647,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -668,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -689,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -710,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -731,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -752,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -773,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -794,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -815,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -836,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -871,13 +882,13 @@ class OutputsPlasticStrainEnergyDensity(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_energy_density._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( plastic_strain_energy_density._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_eqv.py b/src/ansys/dpf/core/operators/result/plastic_strain_eqv.py index 6f6c738ee90..03ede6dfe51 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_eqv.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_eqv.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_eqv(Operator): r"""Read/compute element nodal equivalent plastic strain by calling the @@ -555,43 +566,57 @@ class InputsPlasticStrainEqv(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_eqv._spec().inputs, op) - self._time_scoping = Input(plastic_strain_eqv._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + plastic_strain_eqv._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_eqv._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + plastic_strain_eqv._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_strain_eqv._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_eqv._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_eqv._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + plastic_strain_eqv._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_eqv._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_eqv._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_eqv._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_strain_eqv._spec().input_pin(9), 9, op, -1 ) 
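The Scoping | int | float | Field annotation on time_scoping mirrors the forms the pin's docstring describes. A short sketch of the accepted values (illustrative only, assuming a reachable DPF server):

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.plastic_strain_eqv()
# Each connect() overwrites the previous value; the calls only illustrate the
# accepted forms of the union.
op.inputs.time_scoping.connect(-1)                       # int: every time/freq set
op.inputs.time_scoping.connect(0.02)                     # float: a time/freq value
op.inputs.time_scoping.connect(dpf.Scoping(ids=[1, 2]))  # Scoping: time/freq set ids
# A Field with a "TimeFreq_steps" scoping selects values at specific load steps.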
self._inputs.append(self._requested_location) - self._read_beams = Input(plastic_strain_eqv._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + plastic_strain_eqv._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(plastic_strain_eqv._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + plastic_strain_eqv._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(plastic_strain_eqv._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + plastic_strain_eqv._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( plastic_strain_eqv._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -612,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -633,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -654,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -675,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -696,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -717,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -738,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -759,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -780,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -801,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -822,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -857,11 +882,13 @@ class OutputsPlasticStrainEqv(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_eqv._spec().outputs, op) - self._fields_container = Output(plastic_strain_eqv._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + plastic_strain_eqv._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_intensity.py b/src/ansys/dpf/core/operators/result/plastic_strain_intensity.py index 39443850020..479d78ff169 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_intensity.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_intensity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_intensity(Operator): r"""Reads/computes element nodal component plastic strains, average it on @@ -304,47 +315,49 @@ class InputsPlasticStrainIntensity(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_intensity._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( plastic_strain_intensity._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( plastic_strain_intensity._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_strain_intensity._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_intensity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( plastic_strain_intensity._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_intensity._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_intensity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_intensity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_strain_intensity._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + 
self._read_cyclic: Input[int] = Input( plastic_strain_intensity._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( plastic_strain_intensity._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -365,7 +378,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -386,7 +399,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -407,7 +420,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -428,7 +441,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -449,7 +462,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -470,7 +483,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -491,7 +504,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
Returns @@ -510,7 +523,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -531,7 +544,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -566,13 +579,13 @@ class OutputsPlasticStrainIntensity(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_intensity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( plastic_strain_intensity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_max_shear.py b/src/ansys/dpf/core/operators/result/plastic_strain_max_shear.py index 97e0a2d9b5d..1dec1cfc766 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_max_shear.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_max_shear.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_max_shear(Operator): r"""Reads/computes element nodal component plastic strains, average it on @@ -304,47 +315,49 @@ class InputsPlasticStrainMaxShear(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_max_shear._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( plastic_strain_max_shear._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( plastic_strain_max_shear._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_strain_max_shear._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_max_shear._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( plastic_strain_max_shear._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - 
self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_max_shear._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_max_shear._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_max_shear._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_strain_max_shear._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( plastic_strain_max_shear._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( plastic_strain_max_shear._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -365,7 +378,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -386,7 +399,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -407,7 +420,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -428,7 +441,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -449,7 +462,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. 
An error is raised if connected. @@ -470,7 +483,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -491,7 +504,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -510,7 +523,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -531,7 +544,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -566,13 +579,13 @@ class OutputsPlasticStrainMaxShear(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_max_shear._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( plastic_strain_max_shear._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_principal_1.py b/src/ansys/dpf/core/operators/result/plastic_strain_principal_1.py index 68ace62df59..4e9f53c8b36 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_principal_1.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_principal_1.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_principal_1(Operator): r"""Read/compute element nodal component plastic strains 1st principal @@ -308,47 +319,49 @@ class InputsPlasticStrainPrincipal1(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_principal_1._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( plastic_strain_principal_1._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( plastic_strain_principal_1._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - 
self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_strain_principal_1._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_principal_1._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( plastic_strain_principal_1._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_principal_1._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_principal_1._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_principal_1._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_strain_principal_1._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( plastic_strain_principal_1._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( plastic_strain_principal_1._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
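# Illustrative usage sketch (not part of this patch): with the new annotations,
# `op.inputs.time_scoping` is an `Input[Scoping | int | float | Field]` and
# `op.inputs.data_sources` an `Input[DataSources]`, so IDEs and type checkers can
# surface the accepted pin types. The result-file path below is hypothetical.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.plastic_strain_principal_1()
op.inputs.data_sources.connect(dpf.DataSources(r"/path/to/file.rst"))
op.inputs.time_scoping.connect(-1)  # int -1 requests all time/freq sets, per the pin doc
fields = op.outputs.fields_container()  # evaluates the operator and returns a FieldsContainer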
result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
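# Sketch (illustrative, not from this diff) of the integer codes documented for the
# `read_cyclic` pin and the boolean `read_beams` pin; the result-file path is hypothetical.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.plastic_strain_principal_1()
op.inputs.data_sources.connect(dpf.DataSources(r"/path/to/cyclic_model.rst"))
op.inputs.read_cyclic.connect(2)    # 0: ignore symmetry, 1: sector only, 2: expand, 3: expand and merge stages
op.inputs.read_beams.connect(True)  # also read elemental nodal beam results (default is False)
result = op.outputs.fields_container()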
elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsPlasticStrainPrincipal1(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_principal_1._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( plastic_strain_principal_1._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_principal_2.py b/src/ansys/dpf/core/operators/result/plastic_strain_principal_2.py index 7fcf85b66ce..e4b8d599206 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_principal_2.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_principal_2.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_principal_2(Operator): r"""Read/compute element nodal component plastic strains 2nd principal @@ -308,47 +319,49 @@ class InputsPlasticStrainPrincipal2(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_principal_2._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( plastic_strain_principal_2._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( plastic_strain_principal_2._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_strain_principal_2._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_principal_2._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( plastic_strain_principal_2._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_principal_2._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_principal_2._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_principal_2._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_strain_principal_2._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( plastic_strain_principal_2._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( plastic_strain_principal_2._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
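# Sketch (illustrative, not part of the patch): restricting the result to a node scoping
# and requesting a nodal location, matching the `Input[ScopingsContainer | Scoping]` and
# `Input[str]` annotations above. The node IDs and the path are hypothetical.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.plastic_strain_principal_2()
op.inputs.data_sources.connect(dpf.DataSources(r"/path/to/file.rst"))
nodes = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)
op.inputs.mesh_scoping.connect(nodes)                      # output fields are scoped on these node IDs
op.inputs.requested_location.connect(dpf.locations.nodal)  # average the elemental nodal result to nodes
fc = op.outputs.fields_container()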
Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsPlasticStrainPrincipal2(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_principal_2._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( plastic_strain_principal_2._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_principal_3.py b/src/ansys/dpf/core/operators/result/plastic_strain_principal_3.py index 71640e6e2a3..fbc3f88f742 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_principal_3.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_principal_3.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_principal_3(Operator): r"""Read/compute element nodal component plastic strains 3rd principal @@ -308,47 +319,49 @@ class InputsPlasticStrainPrincipal3(_Inputs): def __init__(self, op: Operator): super().__init__(plastic_strain_principal_3._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( plastic_strain_principal_3._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( plastic_strain_principal_3._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_strain_principal_3._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_principal_3._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( plastic_strain_principal_3._spec().input_pin(4), 4, op, -1 ) 
self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( plastic_strain_principal_3._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_principal_3._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + plastic_strain_principal_3._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( plastic_strain_principal_3._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( plastic_strain_principal_3._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( plastic_strain_principal_3._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
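# Sketch (illustrative, not part of the patch): because the pins are now parametrized
# (`Input[bool]`, `Input[DataSources]`, ...), a static checker such as mypy or pyright can
# flag a mismatched connection before the operator ever runs. The path is hypothetical.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.plastic_strain_principal_3()
op.inputs.data_sources.connect(dpf.DataSources(r"/path/to/file.rst"))  # OK: DataSources expected
op.inputs.read_beams.connect(True)                                     # OK: bool expected
# op.inputs.read_beams.connect("yes")  # a type checker flags this: `Input[bool]` expects a bool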
result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsPlasticStrainPrincipal3(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_principal_3._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( plastic_strain_principal_3._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/plastic_strain_rotation_by_euler_nodes.py b/src/ansys/dpf/core/operators/result/plastic_strain_rotation_by_euler_nodes.py index cceb48b8b42..94f6787b873 100644 --- a/src/ansys/dpf/core/operators/result/plastic_strain_rotation_by_euler_nodes.py +++ b/src/ansys/dpf/core/operators/result/plastic_strain_rotation_by_euler_nodes.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class plastic_strain_rotation_by_euler_nodes(Operator): r"""read Euler angles on elements from the result file and rotate the fields @@ -24,7 +30,7 @@ class plastic_strain_rotation_by_euler_nodes(Operator): ------ fields_container: FieldsContainer, optional streams_container: StreamsContainer or Stream or Class - Dataprocessing::Crstfilewrapper, optional + Dataprocessing::Crstfilewrapper, optional data_sources: DataSources Outputs @@ -188,21 +194,21 @@ class InputsPlasticStrainRotationByEulerNodes(_Inputs): def __init__(self, op: Operator): 
super().__init__(plastic_strain_rotation_by_euler_nodes._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( plastic_strain_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( plastic_strain_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( plastic_strain_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -221,7 +227,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -240,7 +246,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -273,13 +279,13 @@ class OutputsPlasticStrainRotationByEulerNodes(_Outputs): def __init__(self, op: Operator): super().__init__(plastic_strain_rotation_by_euler_nodes._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( plastic_strain_rotation_by_euler_nodes._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/poynting_vector.py b/src/ansys/dpf/core/operators/result/poynting_vector.py index 489fd7736af..b7c31ed163e 100644 --- a/src/ansys/dpf/core/operators/result/poynting_vector.py +++ b/src/ansys/dpf/core/operators/result/poynting_vector.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class poynting_vector(Operator): r"""Compute the Poynting Vector @@ -222,23 +227,31 @@ class InputsPoyntingVector(_Inputs): def __init__(self, op: Operator): super().__init__(poynting_vector._spec().inputs, op) - self._fields_containerA = Input(poynting_vector._spec().input_pin(0), 0, op, -1) + self._fields_containerA: Input[FieldsContainer] = Input( + poynting_vector._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(poynting_vector._spec().input_pin(1), 1, op, -1) + self._fields_containerB: Input[FieldsContainer] = Input( + poynting_vector._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fields_containerB) - self._fields_containerC = Input(poynting_vector._spec().input_pin(2), 2, op, -1) + self._fields_containerC: Input[FieldsContainer] = Input( + poynting_vector._spec().input_pin(2), 2, op, -1 + ) 
self._inputs.append(self._fields_containerC) - self._fields_containerD = Input(poynting_vector._spec().input_pin(3), 3, op, -1) + self._fields_containerD: Input[FieldsContainer] = Input( + poynting_vector._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._fields_containerD) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( poynting_vector._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._int32 = Input(poynting_vector._spec().input_pin(5), 5, op, -1) + self._int32: Input[int] = Input(poynting_vector._spec().input_pin(5), 5, op, -1) self._inputs.append(self._int32) @property - def fields_containerA(self) -> Input: + def fields_containerA(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerA input to the operator. Returns @@ -257,7 +270,7 @@ def fields_containerA(self) -> Input: return self._fields_containerA @property - def fields_containerB(self) -> Input: + def fields_containerB(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerB input to the operator. Returns @@ -276,7 +289,7 @@ def fields_containerB(self) -> Input: return self._fields_containerB @property - def fields_containerC(self) -> Input: + def fields_containerC(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerC input to the operator. Returns @@ -295,7 +308,7 @@ def fields_containerC(self) -> Input: return self._fields_containerC @property - def fields_containerD(self) -> Input: + def fields_containerD(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerD input to the operator. Returns @@ -314,7 +327,7 @@ def fields_containerD(self) -> Input: return self._fields_containerD @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. the mesh region in this pin have to be boundary or skin mesh @@ -335,7 +348,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def int32(self) -> Input: + def int32(self) -> Input[int]: r"""Allows to connect int32 input to the operator. 
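# Sketch (illustrative, not part of the patch): the Poynting vector operator takes four
# fields containers (pins 0-3), an optional boundary/skin mesh (pin 4), and an optional
# load-step number (pin 5). The containers below are empty placeholders; in practice
# they come from upstream operators, and the mesh from a skin/boundary extraction.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.poynting_vector()
fc_a, fc_b, fc_c, fc_d = (dpf.FieldsContainer() for _ in range(4))  # placeholders only
op.inputs.fields_containerA.connect(fc_a)
op.inputs.fields_containerB.connect(fc_b)
op.inputs.fields_containerC.connect(fc_c)
op.inputs.fields_containerD.connect(fc_d)
op.inputs.int32.connect(1)  # restrict the computation to the substeps of load step 1
# op.outputs.fields_container() would evaluate once real data is connected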
load step number, if it's specified, the Poynting Vector is computed only on the substeps of this step @@ -370,11 +383,13 @@ class OutputsPoyntingVector(_Outputs): def __init__(self, op: Operator): super().__init__(poynting_vector._spec().outputs, op) - self._fields_container = Output(poynting_vector._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + poynting_vector._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/poynting_vector_surface.py b/src/ansys/dpf/core/operators/result/poynting_vector_surface.py index 255fe9ca413..7f884eed3c6 100644 --- a/src/ansys/dpf/core/operators/result/poynting_vector_surface.py +++ b/src/ansys/dpf/core/operators/result/poynting_vector_surface.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class poynting_vector_surface(Operator): r"""Compute the Poynting Vector surface integral @@ -222,31 +227,33 @@ class InputsPoyntingVectorSurface(_Inputs): def __init__(self, op: Operator): super().__init__(poynting_vector_surface._spec().inputs, op) - self._fields_containerA = Input( + self._fields_containerA: Input[FieldsContainer] = Input( poynting_vector_surface._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input( + self._fields_containerB: Input[FieldsContainer] = Input( poynting_vector_surface._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._fields_containerB) - self._fields_containerC = Input( + self._fields_containerC: Input[FieldsContainer] = Input( poynting_vector_surface._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_containerC) - self._fields_containerD = Input( + self._fields_containerD: Input[FieldsContainer] = Input( poynting_vector_surface._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._fields_containerD) - self._abstract_meshed_region = Input( + self._abstract_meshed_region: Input[MeshedRegion] = Input( poynting_vector_surface._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._abstract_meshed_region) - self._int32 = Input(poynting_vector_surface._spec().input_pin(5), 5, op, -1) + self._int32: Input[int] = Input( + poynting_vector_surface._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._int32) @property - def fields_containerA(self) -> Input: + def fields_containerA(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerA input to the operator. Returns @@ -265,7 +272,7 @@ def fields_containerA(self) -> Input: return self._fields_containerA @property - def fields_containerB(self) -> Input: + def fields_containerB(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerB input to the operator. 
Returns @@ -284,7 +291,7 @@ def fields_containerB(self) -> Input: return self._fields_containerB @property - def fields_containerC(self) -> Input: + def fields_containerC(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerC input to the operator. Returns @@ -303,7 +310,7 @@ def fields_containerC(self) -> Input: return self._fields_containerC @property - def fields_containerD(self) -> Input: + def fields_containerD(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containerD input to the operator. Returns @@ -322,7 +329,7 @@ def fields_containerD(self) -> Input: return self._fields_containerD @property - def abstract_meshed_region(self) -> Input: + def abstract_meshed_region(self) -> Input[MeshedRegion]: r"""Allows to connect abstract_meshed_region input to the operator. the mesh region in this pin have to be boundary or skin mesh @@ -343,7 +350,7 @@ def abstract_meshed_region(self) -> Input: return self._abstract_meshed_region @property - def int32(self) -> Input: + def int32(self) -> Input[int]: r"""Allows to connect int32 input to the operator. load step number, if it's specified, the Poynting Vector is computed only on the substeps of this step @@ -378,13 +385,13 @@ class OutputsPoyntingVectorSurface(_Outputs): def __init__(self, op: Operator): super().__init__(poynting_vector_surface._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( poynting_vector_surface._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/pres_to_field.py b/src/ansys/dpf/core/operators/result/pres_to_field.py index 94f367a4aed..ad0826e0894 100644 --- a/src/ansys/dpf/core/operators/result/pres_to_field.py +++ b/src/ansys/dpf/core/operators/result/pres_to_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class pres_to_field(Operator): r"""Read the presol generated file from mapdl. @@ -156,13 +160,17 @@ class InputsPresToField(_Inputs): def __init__(self, op: Operator): super().__init__(pres_to_field._spec().inputs, op) - self._filepath = Input(pres_to_field._spec().input_pin(0), 0, op, -1) + self._filepath: Input[str] = Input( + pres_to_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._filepath) - self._columns_to_read = Input(pres_to_field._spec().input_pin(1), 1, op, -1) + self._columns_to_read: Input[int] = Input( + pres_to_field._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._columns_to_read) @property - def filepath(self) -> Input: + def filepath(self) -> Input[str]: r"""Allows to connect filepath input to the operator. filepath @@ -183,7 +191,7 @@ def filepath(self) -> Input: return self._filepath @property - def columns_to_read(self) -> Input: + def columns_to_read(self) -> Input[int]: r"""Allows to connect columns_to_read input to the operator. 
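# Sketch (illustrative, not part of the patch): reading a MAPDL PRESOL-generated file
# into a Field with the now-typed `filepath` (str) and `columns_to_read` (int) pins.
# The file name is hypothetical.
from ansys.dpf.core import operators as ops

op = ops.result.pres_to_field()
op.inputs.filepath.connect(r"/path/to/presol_output.txt")
op.inputs.columns_to_read.connect(1)
field = op.outputs.field()  # typed as Output[Field]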
columns_to_read @@ -218,11 +226,11 @@ class OutputsPresToField(_Outputs): def __init__(self, op: Operator): super().__init__(pres_to_field._spec().outputs, op) - self._field = Output(pres_to_field._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(pres_to_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/pressure.py b/src/ansys/dpf/core/operators/result/pressure.py index 45c72037b01..e1b1d7eca21 100644 --- a/src/ansys/dpf/core/operators/result/pressure.py +++ b/src/ansys/dpf/core/operators/result/pressure.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class pressure(Operator): r"""Read/compute Pressure by calling the readers defined by the datasources. @@ -249,23 +260,37 @@ class InputsPressure(_Inputs): def __init__(self, op: Operator): super().__init__(pressure._spec().inputs, op) - self._time_scoping = Input(pressure._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + pressure._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(pressure._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + pressure._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(pressure._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + pressure._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(pressure._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + pressure._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(pressure._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + pressure._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(pressure._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + pressure._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(pressure._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + pressure._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -286,7 +311,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -307,7 +332,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -328,7 +353,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -349,7 +374,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -370,7 +395,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -391,7 +416,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
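# Sketch (illustrative, not part of the patch): a `Model` can supply both the
# `DataSources` and the `MeshedRegion` pins, which avoids a second mesh read as the
# `mesh` pin documentation describes. The result-file path is hypothetical.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

model = dpf.Model(r"/path/to/result_file.rst")
op = ops.result.pressure()
op.inputs.data_sources.connect(model.metadata.data_sources)
op.inputs.mesh.connect(model.metadata.meshed_region)
op.inputs.time_scoping.connect(1)  # first time/freq set
pressure_fc = op.outputs.fields_container()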
prevents from reading the mesh in the result files @@ -426,11 +451,13 @@ class OutputsPressure(_Outputs): def __init__(self, op: Operator): super().__init__(pressure._spec().outputs, op) - self._fields_container = Output(pressure._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + pressure._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/pretension.py b/src/ansys/dpf/core/operators/result/pretension.py index 73b3ad2129d..0552fa73b3b 100644 --- a/src/ansys/dpf/core/operators/result/pretension.py +++ b/src/ansys/dpf/core/operators/result/pretension.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class pretension(Operator): r"""Reads the pretension adjustment and tension force. Rotation is not @@ -297,31 +307,47 @@ class InputsPretension(_Inputs): def __init__(self, op: Operator): super().__init__(pretension._spec().inputs, op) - self._time_scoping = Input(pretension._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[ScopingsContainer | Scoping] = Input( + pretension._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(pretension._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + pretension._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(pretension._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + pretension._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(pretension._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + pretension._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(pretension._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + pretension._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(pretension._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + pretension._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(pretension._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + pretension._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( pretension._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - 
self._sectors_to_expand = Input(pretension._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( + pretension._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(pretension._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input(pretension._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect time_scoping input to the operator. Returns @@ -340,7 +366,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -359,7 +385,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -378,7 +404,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -397,7 +423,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -416,7 +442,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -437,7 +463,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -458,7 +484,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -479,7 +505,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -500,7 +526,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. 
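# Sketch (illustrative, not part of the patch): the cyclic-expansion pins typed above
# used together, plus the two typed outputs of `pretension`. The path, sector ids, and
# angle are hypothetical values.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.pretension()
op.inputs.data_sources.connect(dpf.DataSources(r"/path/to/cyclic_model.rst"))
op.inputs.read_cyclic.connect(2)                                  # perform cyclic expansion
op.inputs.sectors_to_expand.connect(dpf.Scoping(ids=[0, 1, 2]))   # sector numbering starts at 0
op.inputs.phi.connect(30.0)                                       # angle phi in degrees
adjustment = op.outputs.adjustment()                              # Output[FieldsContainer]
tension_force = op.outputs.tension_force()                        # Output[FieldsContainer]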
@@ -536,13 +562,17 @@ class OutputsPretension(_Outputs): def __init__(self, op: Operator): super().__init__(pretension._spec().outputs, op) - self._adjustment = Output(pretension._spec().output_pin(0), 0, op) + self._adjustment: Output[FieldsContainer] = Output( + pretension._spec().output_pin(0), 0, op + ) self._outputs.append(self._adjustment) - self._tension_force = Output(pretension._spec().output_pin(1), 1, op) + self._tension_force: Output[FieldsContainer] = Output( + pretension._spec().output_pin(1), 1, op + ) self._outputs.append(self._tension_force) @property - def adjustment(self) -> Output: + def adjustment(self) -> Output[FieldsContainer]: r"""Allows to get adjustment output of the operator Adjustment @@ -562,7 +592,7 @@ def adjustment(self) -> Output: return self._adjustment @property - def tension_force(self) -> Output: + def tension_force(self) -> Output[FieldsContainer]: r"""Allows to get tension_force output of the operator Tension Force diff --git a/src/ansys/dpf/core/operators/result/prns_to_field.py b/src/ansys/dpf/core/operators/result/prns_to_field.py index dc3f0466b30..e03993d4bd4 100644 --- a/src/ansys/dpf/core/operators/result/prns_to_field.py +++ b/src/ansys/dpf/core/operators/result/prns_to_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class prns_to_field(Operator): r"""Read the presol of nodal field generated file from mapdl. @@ -156,13 +160,17 @@ class InputsPrnsToField(_Inputs): def __init__(self, op: Operator): super().__init__(prns_to_field._spec().inputs, op) - self._filepath = Input(prns_to_field._spec().input_pin(0), 0, op, -1) + self._filepath: Input[str] = Input( + prns_to_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._filepath) - self._columns_to_read = Input(prns_to_field._spec().input_pin(1), 1, op, -1) + self._columns_to_read: Input[int] = Input( + prns_to_field._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._columns_to_read) @property - def filepath(self) -> Input: + def filepath(self) -> Input[str]: r"""Allows to connect filepath input to the operator. filepath @@ -183,7 +191,7 @@ def filepath(self) -> Input: return self._filepath @property - def columns_to_read(self) -> Input: + def columns_to_read(self) -> Input[int]: r"""Allows to connect columns_to_read input to the operator. 
columns_to_read @@ -218,11 +226,11 @@ class OutputsPrnsToField(_Outputs): def __init__(self, op: Operator): super().__init__(prns_to_field._spec().outputs, op) - self._field = Output(prns_to_field._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(prns_to_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/raw_displacement.py b/src/ansys/dpf/core/operators/result/raw_displacement.py index f083aa54af3..036c8aa8028 100644 --- a/src/ansys/dpf/core/operators/result/raw_displacement.py +++ b/src/ansys/dpf/core/operators/result/raw_displacement.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class raw_displacement(Operator): r"""Read/compute U vector from the finite element problem KU=F by calling @@ -251,27 +262,37 @@ class InputsRawDisplacement(_Inputs): def __init__(self, op: Operator): super().__init__(raw_displacement._spec().inputs, op) - self._time_scoping = Input(raw_displacement._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + raw_displacement._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(raw_displacement._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + raw_displacement._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(raw_displacement._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + raw_displacement._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( raw_displacement._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(raw_displacement._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + raw_displacement._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( raw_displacement._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(raw_displacement._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + raw_displacement._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -292,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -313,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -334,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -355,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -376,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -397,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
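# Sketch (illustrative, not part of the patch): evaluating the raw U vector of KU=F;
# the pin pattern is the same as the other typed result readers. The path is hypothetical.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.raw_displacement()
op.inputs.data_sources.connect(dpf.DataSources(r"/path/to/file.rst"))
op.inputs.time_scoping.connect(-1)  # all time/freq sets
raw_u = op.outputs.fields_container()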
prevents from reading the mesh in the result files @@ -432,11 +453,13 @@ class OutputsRawDisplacement(_Outputs): def __init__(self, op: Operator): super().__init__(raw_displacement._spec().outputs, op) - self._fields_container = Output(raw_displacement._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + raw_displacement._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/raw_reaction_force.py b/src/ansys/dpf/core/operators/result/raw_reaction_force.py index ea14aedce4b..97402b316c4 100644 --- a/src/ansys/dpf/core/operators/result/raw_reaction_force.py +++ b/src/ansys/dpf/core/operators/result/raw_reaction_force.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class raw_reaction_force(Operator): r"""Read/compute F vector from the finite element problem KU=F by calling @@ -251,29 +262,37 @@ class InputsRawReactionForce(_Inputs): def __init__(self, op: Operator): super().__init__(raw_reaction_force._spec().inputs, op) - self._time_scoping = Input(raw_reaction_force._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + raw_reaction_force._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(raw_reaction_force._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + raw_reaction_force._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( raw_reaction_force._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( raw_reaction_force._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(raw_reaction_force._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + raw_reaction_force._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( raw_reaction_force._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(raw_reaction_force._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + raw_reaction_force._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: 
r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -294,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -315,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -336,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -357,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -378,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -399,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -434,11 +453,13 @@ class OutputsRawReactionForce(_Outputs): def __init__(self, op: Operator): super().__init__(raw_reaction_force._spec().outputs, op) - self._fields_container = Output(raw_reaction_force._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + raw_reaction_force._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/reaction_force.py b/src/ansys/dpf/core/operators/result/reaction_force.py index 12e520d8db3..552485715f8 100644 --- a/src/ansys/dpf/core/operators/result/reaction_force.py +++ b/src/ansys/dpf/core/operators/result/reaction_force.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class reaction_force(Operator): r"""Read/compute nodal reaction forces by calling the readers defined by the @@ -251,25 +262,37 @@ class InputsReactionForce(_Inputs): def __init__(self, op: Operator): super().__init__(reaction_force._spec().inputs, op) - self._time_scoping = Input(reaction_force._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + reaction_force._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(reaction_force._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + reaction_force._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(reaction_force._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + reaction_force._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(reaction_force._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + reaction_force._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(reaction_force._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + reaction_force._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( reaction_force._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(reaction_force._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + reaction_force._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> 
Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -290,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -311,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -332,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -353,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -374,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -395,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -430,11 +453,13 @@ class OutputsReactionForce(_Outputs): def __init__(self, op: Operator): super().__init__(reaction_force._spec().outputs, op) - self._fields_container = Output(reaction_force._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + reaction_force._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/reaction_force_X.py b/src/ansys/dpf/core/operators/result/reaction_force_X.py index 888d627f8d5..291ca47e623 100644 --- a/src/ansys/dpf/core/operators/result/reaction_force_X.py +++ b/src/ansys/dpf/core/operators/result/reaction_force_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class reaction_force_X(Operator): r"""Read/compute nodal reaction forces X component of the vector (1st @@ -267,29 +278,41 @@ class InputsReactionForceX(_Inputs): def __init__(self, op: Operator): super().__init__(reaction_force_X._spec().inputs, op) - self._time_scoping = Input(reaction_force_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + reaction_force_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(reaction_force_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + reaction_force_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(reaction_force_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + reaction_force_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( reaction_force_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(reaction_force_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + reaction_force_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( reaction_force_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(reaction_force_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + reaction_force_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(reaction_force_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = 
Input( + reaction_force_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -310,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -331,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -352,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -373,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -394,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -415,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -436,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -471,11 +494,13 @@ class OutputsReactionForceX(_Outputs): def __init__(self, op: Operator): super().__init__(reaction_force_X._spec().outputs, op) - self._fields_container = Output(reaction_force_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + reaction_force_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/reaction_force_Y.py b/src/ansys/dpf/core/operators/result/reaction_force_Y.py index e7d89ef61ea..3ef47d3f823 100644 --- a/src/ansys/dpf/core/operators/result/reaction_force_Y.py +++ b/src/ansys/dpf/core/operators/result/reaction_force_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class reaction_force_Y(Operator): r"""Read/compute nodal reaction forces Y component of the vector (2nd @@ -267,29 +278,41 @@ class InputsReactionForceY(_Inputs): def __init__(self, op: Operator): super().__init__(reaction_force_Y._spec().inputs, op) - self._time_scoping = Input(reaction_force_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + reaction_force_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(reaction_force_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + reaction_force_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(reaction_force_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + reaction_force_Y._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( reaction_force_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(reaction_force_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + reaction_force_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( reaction_force_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(reaction_force_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + reaction_force_Y._spec().input_pin(7), 7, op, -1 + ) 
self._inputs.append(self._mesh) - self._read_cyclic = Input(reaction_force_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + reaction_force_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -310,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -331,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -352,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -373,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -394,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -415,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -436,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -471,11 +494,13 @@ class OutputsReactionForceY(_Outputs): def __init__(self, op: Operator): super().__init__(reaction_force_Y._spec().outputs, op) - self._fields_container = Output(reaction_force_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + reaction_force_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/reaction_force_Z.py b/src/ansys/dpf/core/operators/result/reaction_force_Z.py index cffb6e8eda9..9b9e8d1466e 100644 --- a/src/ansys/dpf/core/operators/result/reaction_force_Z.py +++ b/src/ansys/dpf/core/operators/result/reaction_force_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class reaction_force_Z(Operator): r"""Read/compute nodal reaction forces Z component of the vector (3rd @@ -267,29 +278,41 @@ class InputsReactionForceZ(_Inputs): def __init__(self, op: Operator): super().__init__(reaction_force_Z._spec().inputs, op) - self._time_scoping = Input(reaction_force_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + reaction_force_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(reaction_force_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + reaction_force_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(reaction_force_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + reaction_force_Z._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( reaction_force_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(reaction_force_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + reaction_force_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( reaction_force_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(reaction_force_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + reaction_force_Z._spec().input_pin(7), 7, op, -1 + ) 
self._inputs.append(self._mesh) - self._read_cyclic = Input(reaction_force_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + reaction_force_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -310,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -331,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -352,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -373,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -394,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -415,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -436,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -471,11 +494,13 @@ class OutputsReactionForceZ(_Outputs): def __init__(self, op: Operator): super().__init__(reaction_force_Z._spec().outputs, op) - self._fields_container = Output(reaction_force_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + reaction_force_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/read_cms_rbd_file.py b/src/ansys/dpf/core/operators/result/read_cms_rbd_file.py index bf7d90c5c80..1fe8ae26983 100644 --- a/src/ansys/dpf/core/operators/result/read_cms_rbd_file.py +++ b/src/ansys/dpf/core/operators/result/read_cms_rbd_file.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.property_field import PropertyField + class read_cms_rbd_file(Operator): r"""Read the invariant terms and the model data from a cms_rbd file @@ -321,13 +326,13 @@ class InputsReadCmsRbdFile(_Inputs): def __init__(self, op: Operator): super().__init__(read_cms_rbd_file._spec().inputs, op) - self._in_cms_rbd_file_path = Input( + self._in_cms_rbd_file_path: Input[str] = Input( read_cms_rbd_file._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._in_cms_rbd_file_path) @property - def in_cms_rbd_file_path(self) -> Input: + def in_cms_rbd_file_path(self) -> Input[str]: r"""Allows to connect in_cms_rbd_file_path input to the operator. file name with cms_rbd extension where to read the input cms_rbd file. 
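A minimal usage sketch of what the `Input[str]` annotation on `in_cms_rbd_file_path` buys at the call site (illustrative only, not part of this diff; it assumes the generated operator keeps its usual `inputs` accessor, a running DPF server, and `model.cms_rbd` is a placeholder path):

    from ansys.dpf.core.operators.result import read_cms_rbd_file

    op = read_cms_rbd_file()
    # connect() is declared as connect(inpt: T); here T is str, so the expected payload is visible to checkers
    op.inputs.in_cms_rbd_file_path.connect("model.cms_rbd")  # OK: str matches Input[str]
    # op.inputs.in_cms_rbd_file_path.connect(42)             # would be flagged by mypy/pyright: int is not str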
@@ -384,57 +389,77 @@ class OutputsReadCmsRbdFile(_Outputs): def __init__(self, op: Operator): super().__init__(read_cms_rbd_file._spec().outputs, op) - self._model_data = Output(read_cms_rbd_file._spec().output_pin(0), 0, op) + self._model_data: Output[PropertyField] = Output( + read_cms_rbd_file._spec().output_pin(0), 0, op + ) self._outputs.append(self._model_data) - self._center_of_mass = Output(read_cms_rbd_file._spec().output_pin(1), 1, op) + self._center_of_mass: Output[Field] = Output( + read_cms_rbd_file._spec().output_pin(1), 1, op + ) self._outputs.append(self._center_of_mass) - self._inertia_relief = Output(read_cms_rbd_file._spec().output_pin(2), 2, op) + self._inertia_relief: Output[Field] = Output( + read_cms_rbd_file._spec().output_pin(2), 2, op + ) self._outputs.append(self._inertia_relief) - self._model_size = Output(read_cms_rbd_file._spec().output_pin(3), 3, op) + self._model_size: Output[PropertyField] = Output( + read_cms_rbd_file._spec().output_pin(3), 3, op + ) self._outputs.append(self._model_size) - self._master_node_coordinates = Output( + self._master_node_coordinates: Output = Output( read_cms_rbd_file._spec().output_pin(4), 4, op ) self._outputs.append(self._master_node_coordinates) - self._v_trsf = Output(read_cms_rbd_file._spec().output_pin(5), 5, op) + self._v_trsf: Output = Output(read_cms_rbd_file._spec().output_pin(5), 5, op) self._outputs.append(self._v_trsf) - self._k_mat = Output(read_cms_rbd_file._spec().output_pin(6), 6, op) + self._k_mat: Output[Field] = Output( + read_cms_rbd_file._spec().output_pin(6), 6, op + ) self._outputs.append(self._k_mat) - self._mass_mat = Output(read_cms_rbd_file._spec().output_pin(7), 7, op) + self._mass_mat: Output[Field] = Output( + read_cms_rbd_file._spec().output_pin(7), 7, op + ) self._outputs.append(self._mass_mat) - self._c_mat = Output(read_cms_rbd_file._spec().output_pin(8), 8, op) + self._c_mat: Output[Field] = Output( + read_cms_rbd_file._spec().output_pin(8), 8, op + ) self._outputs.append(self._c_mat) - self._rhs = Output(read_cms_rbd_file._spec().output_pin(9), 9, op) + self._rhs: Output[Field] = Output( + read_cms_rbd_file._spec().output_pin(9), 9, op + ) self._outputs.append(self._rhs) - self._dn = Output(read_cms_rbd_file._spec().output_pin(10), 10, op) + self._dn: Output = Output(read_cms_rbd_file._spec().output_pin(10), 10, op) self._outputs.append(self._dn) - self._dr_cross_n = Output(read_cms_rbd_file._spec().output_pin(11), 11, op) + self._dr_cross_n: Output = Output( + read_cms_rbd_file._spec().output_pin(11), 11, op + ) self._outputs.append(self._dr_cross_n) - self._drn = Output(read_cms_rbd_file._spec().output_pin(12), 12, op) + self._drn: Output = Output(read_cms_rbd_file._spec().output_pin(12), 12, op) self._outputs.append(self._drn) - self._dn_cross_n = Output(read_cms_rbd_file._spec().output_pin(13), 13, op) + self._dn_cross_n: Output = Output( + read_cms_rbd_file._spec().output_pin(13), 13, op + ) self._outputs.append(self._dn_cross_n) - self._dnx_y = Output(read_cms_rbd_file._spec().output_pin(14), 14, op) + self._dnx_y: Output = Output(read_cms_rbd_file._spec().output_pin(14), 14, op) self._outputs.append(self._dnx_y) - self._dny_y = Output(read_cms_rbd_file._spec().output_pin(15), 15, op) + self._dny_y: Output = Output(read_cms_rbd_file._spec().output_pin(15), 15, op) self._outputs.append(self._dny_y) - self._dnz_y = Output(read_cms_rbd_file._spec().output_pin(16), 16, op) + self._dnz_y: Output = Output(read_cms_rbd_file._spec().output_pin(16), 16, op) self._outputs.append(self._dnz_y) - 
self._dyx_n = Output(read_cms_rbd_file._spec().output_pin(17), 17, op) + self._dyx_n: Output = Output(read_cms_rbd_file._spec().output_pin(17), 17, op) self._outputs.append(self._dyx_n) - self._dyy_n = Output(read_cms_rbd_file._spec().output_pin(18), 18, op) + self._dyy_n: Output = Output(read_cms_rbd_file._spec().output_pin(18), 18, op) self._outputs.append(self._dyy_n) - self._dyz_n = Output(read_cms_rbd_file._spec().output_pin(19), 19, op) + self._dyz_n: Output = Output(read_cms_rbd_file._spec().output_pin(19), 19, op) self._outputs.append(self._dyz_n) - self._dnxn = Output(read_cms_rbd_file._spec().output_pin(20), 20, op) + self._dnxn: Output = Output(read_cms_rbd_file._spec().output_pin(20), 20, op) self._outputs.append(self._dnxn) - self._dnyn = Output(read_cms_rbd_file._spec().output_pin(21), 21, op) + self._dnyn: Output = Output(read_cms_rbd_file._spec().output_pin(21), 21, op) self._outputs.append(self._dnyn) - self._dnzn = Output(read_cms_rbd_file._spec().output_pin(22), 22, op) + self._dnzn: Output = Output(read_cms_rbd_file._spec().output_pin(22), 22, op) self._outputs.append(self._dnzn) @property - def model_data(self) -> Output: + def model_data(self) -> Output[PropertyField]: r"""Allows to get model_data output of the operator data describing the finite element model @@ -454,7 +479,7 @@ def model_data(self) -> Output: return self._model_data @property - def center_of_mass(self) -> Output: + def center_of_mass(self) -> Output[Field]: r"""Allows to get center_of_mass output of the operator center of mass of the body @@ -474,7 +499,7 @@ def center_of_mass(self) -> Output: return self._center_of_mass @property - def inertia_relief(self) -> Output: + def inertia_relief(self) -> Output[Field]: r"""Allows to get inertia_relief output of the operator inertia matrix @@ -494,7 +519,7 @@ def inertia_relief(self) -> Output: return self._inertia_relief @property - def model_size(self) -> Output: + def model_size(self) -> Output[PropertyField]: r"""Allows to get model_size output of the operator Returns @@ -550,7 +575,7 @@ def v_trsf(self) -> Output: return self._v_trsf @property - def k_mat(self) -> Output: + def k_mat(self) -> Output[Field]: r"""Allows to get k_mat output of the operator Returns @@ -568,7 +593,7 @@ def k_mat(self) -> Output: return self._k_mat @property - def mass_mat(self) -> Output: + def mass_mat(self) -> Output[Field]: r"""Allows to get mass_mat output of the operator Returns @@ -586,7 +611,7 @@ def mass_mat(self) -> Output: return self._mass_mat @property - def c_mat(self) -> Output: + def c_mat(self) -> Output[Field]: r"""Allows to get c_mat output of the operator Returns @@ -604,7 +629,7 @@ def c_mat(self) -> Output: return self._c_mat @property - def rhs(self) -> Output: + def rhs(self) -> Output[Field]: r"""Allows to get rhs output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/recombine_harmonic_indeces_cyclic.py b/src/ansys/dpf/core/operators/result/recombine_harmonic_indeces_cyclic.py index ee6c01116e4..bed9a82b6a2 100644 --- a/src/ansys/dpf/core/operators/result/recombine_harmonic_indeces_cyclic.py +++ b/src/ansys/dpf/core/operators/result/recombine_harmonic_indeces_cyclic.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + 
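The `if TYPE_CHECKING:` blocks added throughout these files follow the pattern sketched below (illustrative, not taken from the diff; `describe` is a hypothetical helper): the guarded imports are resolved by type checkers only, so they add no runtime import cost and cannot create import cycles, while `from __future__ import annotations` keeps the annotations as plain strings at runtime.

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # seen by mypy/pyright only; never imported at runtime
        from ansys.dpf.core.fields_container import FieldsContainer

    def describe(fc: FieldsContainer) -> str:
        # at runtime the annotation is just the string "FieldsContainer"
        return f"got a {type(fc).__name__}"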
class recombine_harmonic_indeces_cyclic(Operator): r"""Add the fields corresponding to different load steps with the same @@ -163,17 +167,17 @@ class InputsRecombineHarmonicIndecesCyclic(_Inputs): def __init__(self, op: Operator): super().__init__(recombine_harmonic_indeces_cyclic._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( recombine_harmonic_indeces_cyclic._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._is_constant = Input( + self._is_constant: Input[bool] = Input( recombine_harmonic_indeces_cyclic._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._is_constant) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -192,7 +196,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def is_constant(self) -> Input: + def is_constant(self) -> Input[bool]: r"""Allows to connect is_constant input to the operator. If the result is constant, it will only copy the first result found. @@ -227,13 +231,13 @@ class OutputsRecombineHarmonicIndecesCyclic(_Outputs): def __init__(self, op: Operator): super().__init__(recombine_harmonic_indeces_cyclic._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( recombine_harmonic_indeces_cyclic._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/remove_rigid_body_motion.py b/src/ansys/dpf/core/operators/result/remove_rigid_body_motion.py index 9a185cedd8e..072d6379b6b 100644 --- a/src/ansys/dpf/core/operators/result/remove_rigid_body_motion.py +++ b/src/ansys/dpf/core/operators/result/remove_rigid_body_motion.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class remove_rigid_body_motion(Operator): r"""Removes rigid body mode from a total displacement field by minimization. @@ -204,17 +210,21 @@ class InputsRemoveRigidBodyMotion(_Inputs): def __init__(self, op: Operator): super().__init__(remove_rigid_body_motion._spec().inputs, op) - self._field = Input(remove_rigid_body_motion._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + remove_rigid_body_motion._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._reference_node_id = Input( + self._reference_node_id: Input[int] = Input( remove_rigid_body_motion._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._reference_node_id) - self._mesh = Input(remove_rigid_body_motion._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + remove_rigid_body_motion._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. 
field or fields container with only one field is expected @@ -235,7 +245,7 @@ def field(self) -> Input: return self._field @property - def reference_node_id(self) -> Input: + def reference_node_id(self) -> Input[int]: r"""Allows to connect reference_node_id input to the operator. Id of the reference entity (node). @@ -256,7 +266,7 @@ def reference_node_id(self) -> Input: return self._reference_node_id @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. default is the mesh in the support @@ -294,23 +304,25 @@ class OutputsRemoveRigidBodyMotion(_Outputs): def __init__(self, op: Operator): super().__init__(remove_rigid_body_motion._spec().outputs, op) - self._field = Output(remove_rigid_body_motion._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + remove_rigid_body_motion._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) - self._translation_field = Output( + self._translation_field: Output[Field] = Output( remove_rigid_body_motion._spec().output_pin(1), 1, op ) self._outputs.append(self._translation_field) - self._rotation_field = Output( + self._rotation_field: Output[Field] = Output( remove_rigid_body_motion._spec().output_pin(2), 2, op ) self._outputs.append(self._rotation_field) - self._center_field = Output( + self._center_field: Output[Field] = Output( remove_rigid_body_motion._spec().output_pin(3), 3, op ) self._outputs.append(self._center_field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns @@ -328,7 +340,7 @@ def field(self) -> Output: return self._field @property - def translation_field(self) -> Output: + def translation_field(self) -> Output[Field]: r"""Allows to get translation_field output of the operator Global rigid translation vector @@ -348,7 +360,7 @@ def translation_field(self) -> Output: return self._translation_field @property - def rotation_field(self) -> Output: + def rotation_field(self) -> Output[Field]: r"""Allows to get rotation_field output of the operator Global rigid rotation angles @@ -368,7 +380,7 @@ def rotation_field(self) -> Output: return self._rotation_field @property - def center_field(self) -> Output: + def center_field(self) -> Output[Field]: r"""Allows to get center_field output of the operator Center of the rigid rotation diff --git a/src/ansys/dpf/core/operators/result/remove_rigid_body_motion_fc.py b/src/ansys/dpf/core/operators/result/remove_rigid_body_motion_fc.py index b18d5e540c0..62839d5b9ef 100644 --- a/src/ansys/dpf/core/operators/result/remove_rigid_body_motion_fc.py +++ b/src/ansys/dpf/core/operators/result/remove_rigid_body_motion_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class remove_rigid_body_motion_fc(Operator): r"""Removes rigid body mode from a total displacement field by minimization. 
@@ -209,19 +215,21 @@ class InputsRemoveRigidBodyMotionFc(_Inputs): def __init__(self, op: Operator): super().__init__(remove_rigid_body_motion_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( remove_rigid_body_motion_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._reference_node_id = Input( + self._reference_node_id: Input[int] = Input( remove_rigid_body_motion_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._reference_node_id) - self._mesh = Input(remove_rigid_body_motion_fc._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + remove_rigid_body_motion_fc._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -242,7 +250,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def reference_node_id(self) -> Input: + def reference_node_id(self) -> Input[int]: r"""Allows to connect reference_node_id input to the operator. Id of the reference entity (node). @@ -263,7 +271,7 @@ def reference_node_id(self) -> Input: return self._reference_node_id @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. default is the mesh in the support @@ -301,25 +309,25 @@ class OutputsRemoveRigidBodyMotionFc(_Outputs): def __init__(self, op: Operator): super().__init__(remove_rigid_body_motion_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( remove_rigid_body_motion_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) - self._translation_field = Output( + self._translation_field: Output[Field] = Output( remove_rigid_body_motion_fc._spec().output_pin(1), 1, op ) self._outputs.append(self._translation_field) - self._rotation_field = Output( + self._rotation_field: Output[Field] = Output( remove_rigid_body_motion_fc._spec().output_pin(2), 2, op ) self._outputs.append(self._rotation_field) - self._center_field = Output( + self._center_field: Output[Field] = Output( remove_rigid_body_motion_fc._spec().output_pin(3), 3, op ) self._outputs.append(self._center_field) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns @@ -337,7 +345,7 @@ def fields_container(self) -> Output: return self._fields_container @property - def translation_field(self) -> Output: + def translation_field(self) -> Output[Field]: r"""Allows to get translation_field output of the operator Global rigid translation vector @@ -357,7 +365,7 @@ def translation_field(self) -> Output: return self._translation_field @property - def rotation_field(self) -> Output: + def rotation_field(self) -> Output[Field]: r"""Allows to get rotation_field output of the operator Global rigid rotation angles @@ -377,7 +385,7 @@ def rotation_field(self) -> Output: return self._rotation_field @property - def center_field(self) -> Output: + def center_field(self) -> Output[Field]: r"""Allows to get center_field output of the operator Center of the rigid rotation diff --git a/src/ansys/dpf/core/operators/result/result_provider.py b/src/ansys/dpf/core/operators/result/result_provider.py 
index f2be2207a71..1f051496c2b 100644 --- a/src/ansys/dpf/core/operators/result/result_provider.py +++ b/src/ansys/dpf/core/operators/result/result_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.core import errors @@ -15,6 +16,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class result_provider(Operator): r"""Read/compute user defined result by calling the readers defined by the @@ -290,31 +301,45 @@ class InputsResultProvider(_Inputs): def __init__(self, op: Operator): super().__init__(result_provider._spec().inputs, op) - self._time_scoping = Input(result_provider._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + result_provider._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(result_provider._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + result_provider._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(result_provider._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + result_provider._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(result_provider._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + result_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(result_provider._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + result_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( result_provider._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(result_provider._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + result_provider._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._result_name = Input(result_provider._spec().input_pin(60), 60, op, -1) + self._result_name: Input = Input( + result_provider._spec().input_pin(60), 60, op, -1 + ) self._inputs.append(self._result_name) - self._result_scripting_name = Input( + self._result_scripting_name: Input = Input( result_provider._spec().input_pin(64), 64, op, -1 ) self._inputs.append(self._result_scripting_name) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. 
To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -335,7 +360,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -356,7 +381,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -377,7 +402,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -398,7 +423,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -419,7 +444,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -440,7 +465,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -517,11 +542,13 @@ class OutputsResultProvider(_Outputs): def __init__(self, op: Operator): super().__init__(result_provider._spec().outputs, op) - self._fields_container = Output(result_provider._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + result_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/rigid_transformation.py b/src/ansys/dpf/core/operators/result/rigid_transformation.py index 02f3f42fc5d..634e9c74663 100644 --- a/src/ansys/dpf/core/operators/result/rigid_transformation.py +++ b/src/ansys/dpf/core/operators/result/rigid_transformation.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class rigid_transformation(Operator): r"""Extracts rigid body motions from a displacement in input. @@ -162,15 +168,17 @@ class InputsRigidTransformation(_Inputs): def __init__(self, op: Operator): super().__init__(rigid_transformation._spec().inputs, op) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( rigid_transformation._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(rigid_transformation._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + rigid_transformation._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. streams (result file container) (optional) @@ -191,7 +199,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. If the stream is null, retrieves the file path from the data sources. 
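A minimal call-site sketch of what the new InputsResultProvider annotations give a caller. It assumes a running DPF server; the file path and the "S" result name are placeholders, and only the pin types come from this diff:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    op = ops.result.result_provider()
    # Input[DataSources]: a DataSources object is the expected payload for pin 4
    op.inputs.data_sources.connect(dpf.DataSources(r"model.rst"))
    # Input[Scoping | int | float | Field]: -1 asks for every time/freq set
    op.inputs.time_scoping.connect(-1)
    # pin 60 stays a bare Input in this change, so any payload type-checks here
    op.inputs.result_name.connect("S")
    fc = op.outputs.fields_container()  # evaluates the operator, returns a FieldsContainer

A checker such as mypy or pyright can now flag, for example, a plain string connected to time_scoping, which previously passed without any static warning.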
@@ -226,13 +234,13 @@ class OutputsRigidTransformation(_Outputs): def __init__(self, op: Operator): super().__init__(rigid_transformation._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( rigid_transformation._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/rigid_transformation_provider.py b/src/ansys/dpf/core/operators/result/rigid_transformation_provider.py index 9ae28ced206..04c6ed1f2e1 100644 --- a/src/ansys/dpf/core/operators/result/rigid_transformation_provider.py +++ b/src/ansys/dpf/core/operators/result/rigid_transformation_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + class rigid_transformation_provider(Operator): r"""Extracts rigid body transformation from dsub file. @@ -162,11 +167,11 @@ class InputsRigidTransformationProvider(_Inputs): def __init__(self, op: Operator): super().__init__(rigid_transformation_provider._spec().inputs, op) - self._streams = Input( + self._streams: Input = Input( rigid_transformation_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( rigid_transformation_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @@ -193,7 +198,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. data source of dsub file. 
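The import blocks guarded by TYPE_CHECKING at the top of each of these modules exist only for static analysis: together with `from __future__ import annotations` (PEP 563), the annotations are stored as strings and never evaluated at runtime, so the new imports add no load-time cost and cannot create import cycles. A standalone sketch of the mechanism; the `describe` helper is hypothetical and not part of this change:

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # seen by type checkers and IDEs only; skipped when the module is imported
        from ansys.dpf.core.data_sources import DataSources

    def describe(ds: DataSources) -> str:
        # the annotation above is a plain string at runtime, so DataSources does
        # not have to be importable for this module to load
        return f"data sources: {ds!r}"

Note that the `streams` pin of rigid_transformation_provider is left as a bare `Input`, so it gains no static checking from this pattern.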
@@ -228,13 +233,13 @@ class OutputsRigidTransformationProvider(_Outputs): def __init__(self, op: Operator): super().__init__(rigid_transformation_provider._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( rigid_transformation_provider._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/rms_static_pressure.py b/src/ansys/dpf/core/operators/result/rms_static_pressure.py index e9ce1b9e876..c33b9986411 100644 --- a/src/ansys/dpf/core/operators/result/rms_static_pressure.py +++ b/src/ansys/dpf/core/operators/result/rms_static_pressure.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class rms_static_pressure(Operator): r"""Read RMS Static Pressure by calling the readers defined by the @@ -267,33 +278,41 @@ class InputsRmsStaticPressure(_Inputs): def __init__(self, op: Operator): super().__init__(rms_static_pressure._spec().inputs, op) - self._time_scoping = Input(rms_static_pressure._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + rms_static_pressure._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(rms_static_pressure._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + rms_static_pressure._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( rms_static_pressure._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(rms_static_pressure._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + rms_static_pressure._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(rms_static_pressure._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + rms_static_pressure._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( rms_static_pressure._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input( + self._qualifiers1: Input[dict] = Input( rms_static_pressure._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input( + self._qualifiers2: Input[dict] = Input( rms_static_pressure._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._qualifiers2) @property - def 
time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -314,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -335,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -356,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -377,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -398,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -419,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -440,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -475,13 +494,13 @@ class OutputsRmsStaticPressure(_Outputs): def __init__(self, op: Operator): super().__init__(rms_static_pressure._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( rms_static_pressure._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/rms_temperature.py b/src/ansys/dpf/core/operators/result/rms_temperature.py index 0f70f3011a0..a9b4403c3b8 100644 --- a/src/ansys/dpf/core/operators/result/rms_temperature.py +++ b/src/ansys/dpf/core/operators/result/rms_temperature.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class rms_temperature(Operator): r"""Read RMS Temperature by calling the readers defined by the datasources. @@ -265,25 +276,41 @@ class InputsRmsTemperature(_Inputs): def __init__(self, op: Operator): super().__init__(rms_temperature._spec().inputs, op) - self._time_scoping = Input(rms_temperature._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + rms_temperature._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(rms_temperature._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + rms_temperature._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(rms_temperature._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + rms_temperature._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(rms_temperature._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + rms_temperature._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(rms_temperature._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + rms_temperature._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(rms_temperature._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + rms_temperature._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(rms_temperature._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + rms_temperature._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = 
Input(rms_temperature._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + rms_temperature._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsRmsTemperature(_Outputs): def __init__(self, op: Operator): super().__init__(rms_temperature._spec().outputs, op) - self._fields_container = Output(rms_temperature._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + rms_temperature._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/rms_velocity.py b/src/ansys/dpf/core/operators/result/rms_velocity.py index d667ceb1cd8..be83cdd1d6c 100644 --- a/src/ansys/dpf/core/operators/result/rms_velocity.py +++ b/src/ansys/dpf/core/operators/result/rms_velocity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class rms_velocity(Operator): r"""Read RMS Velocity by calling the readers defined by the datasources. @@ -265,25 +276,41 @@ class InputsRmsVelocity(_Inputs): def __init__(self, op: Operator): super().__init__(rms_velocity._spec().inputs, op) - self._time_scoping = Input(rms_velocity._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + rms_velocity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(rms_velocity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + rms_velocity._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(rms_velocity._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + rms_velocity._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(rms_velocity._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + rms_velocity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(rms_velocity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + rms_velocity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(rms_velocity._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + rms_velocity._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(rms_velocity._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + rms_velocity._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(rms_velocity._spec().input_pin(1001), 1001, 
op, 1) + self._qualifiers2: Input[dict] = Input( + rms_velocity._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsRmsVelocity(_Outputs): def __init__(self, op: Operator): super().__init__(rms_velocity._spec().outputs, op) - self._fields_container = Output(rms_velocity._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + rms_velocity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/rom_data_provider.py b/src/ansys/dpf/core/operators/result/rom_data_provider.py index 70df58ab5ed..206f8cd208a 100644 --- a/src/ansys/dpf/core/operators/result/rom_data_provider.py +++ b/src/ansys/dpf/core/operators/result/rom_data_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.property_field import PropertyField + class rom_data_provider(Operator): r"""Set the required data for the invariant terms computation (reduced @@ -349,35 +357,41 @@ class InputsRomDataProvider(_Inputs): def __init__(self, op: Operator): super().__init__(rom_data_provider._spec().inputs, op) - self._rom_type = Input(rom_data_provider._spec().input_pin(0), 0, op, -1) + self._rom_type: Input[bool] = Input( + rom_data_provider._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._rom_type) - self._reduced_stiff_matrix = Input( + self._reduced_stiff_matrix: Input[FieldsContainer] = Input( rom_data_provider._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._reduced_stiff_matrix) - self._reduced_damping_matrix = Input( + self._reduced_damping_matrix: Input[FieldsContainer] = Input( rom_data_provider._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._reduced_damping_matrix) - self._reduced_mass_matrix = Input( + self._reduced_mass_matrix: Input[FieldsContainer] = Input( rom_data_provider._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._reduced_mass_matrix) - self._data_sources = Input(rom_data_provider._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + rom_data_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._reduced_rhs_vector = Input( + self._reduced_rhs_vector: Input[FieldsContainer] = Input( rom_data_provider._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._reduced_rhs_vector) - self._lumped_mass_matrix = Input( + self._lumped_mass_matrix: Input[FieldsContainer] = Input( rom_data_provider._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._lumped_mass_matrix) - self._mode_shapes = Input(rom_data_provider._spec().input_pin(7), 7, op, -1) + self._mode_shapes: Input[FieldsContainer] = Input( + rom_data_provider._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mode_shapes) @property - def rom_type(self) -> Input: + def rom_type(self) -> Input[bool]: r"""Allows to connect rom_type input to the operator. 
If this pin is set to true, customized rom data must be given @@ -398,7 +412,7 @@ def rom_type(self) -> Input: return self._rom_type @property - def reduced_stiff_matrix(self) -> Input: + def reduced_stiff_matrix(self) -> Input[FieldsContainer]: r"""Allows to connect reduced_stiff_matrix input to the operator. FieldsContainers containing the reduced Stiffness matrix @@ -419,7 +433,7 @@ def reduced_stiff_matrix(self) -> Input: return self._reduced_stiff_matrix @property - def reduced_damping_matrix(self) -> Input: + def reduced_damping_matrix(self) -> Input[FieldsContainer]: r"""Allows to connect reduced_damping_matrix input to the operator. FieldsContainers containing the reduced Mass matrix @@ -440,7 +454,7 @@ def reduced_damping_matrix(self) -> Input: return self._reduced_damping_matrix @property - def reduced_mass_matrix(self) -> Input: + def reduced_mass_matrix(self) -> Input[FieldsContainer]: r"""Allows to connect reduced_mass_matrix input to the operator. FieldsContainers containing the reduced Damp matrix @@ -461,7 +475,7 @@ def reduced_mass_matrix(self) -> Input: return self._reduced_mass_matrix @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -480,7 +494,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def reduced_rhs_vector(self) -> Input: + def reduced_rhs_vector(self) -> Input[FieldsContainer]: r"""Allows to connect reduced_rhs_vector input to the operator. FieldsContainers containing the reduced RHS vector @@ -501,7 +515,7 @@ def reduced_rhs_vector(self) -> Input: return self._reduced_rhs_vector @property - def lumped_mass_matrix(self) -> Input: + def lumped_mass_matrix(self) -> Input[FieldsContainer]: r"""Allows to connect lumped_mass_matrix input to the operator. FieldsContainers containing the lumped Mass matrix @@ -522,7 +536,7 @@ def lumped_mass_matrix(self) -> Input: return self._lumped_mass_matrix @property - def mode_shapes(self) -> Input: + def mode_shapes(self) -> Input[FieldsContainer]: r"""Allows to connect mode_shapes input to the operator. 
FieldsContainers containing the customized mode shapes @@ -567,33 +581,51 @@ class OutputsRomDataProvider(_Outputs): def __init__(self, op: Operator): super().__init__(rom_data_provider._spec().outputs, op) - self._rom_matrices = Output(rom_data_provider._spec().output_pin(0), 0, op) + self._rom_matrices: Output[FieldsContainer] = Output( + rom_data_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._rom_matrices) - self._mode_shapes = Output(rom_data_provider._spec().output_pin(1), 1, op) + self._mode_shapes: Output[FieldsContainer] = Output( + rom_data_provider._spec().output_pin(1), 1, op + ) self._outputs.append(self._mode_shapes) - self._lumped_mass = Output(rom_data_provider._spec().output_pin(2), 2, op) + self._lumped_mass: Output[FieldsContainer] = Output( + rom_data_provider._spec().output_pin(2), 2, op + ) self._outputs.append(self._lumped_mass) - self._model_data = Output(rom_data_provider._spec().output_pin(3), 3, op) + self._model_data: Output[PropertyField] = Output( + rom_data_provider._spec().output_pin(3), 3, op + ) self._outputs.append(self._model_data) - self._center_of_mass = Output(rom_data_provider._spec().output_pin(4), 4, op) + self._center_of_mass: Output[PropertyField] = Output( + rom_data_provider._spec().output_pin(4), 4, op + ) self._outputs.append(self._center_of_mass) - self._inertia_relief = Output(rom_data_provider._spec().output_pin(5), 5, op) + self._inertia_relief: Output[Field] = Output( + rom_data_provider._spec().output_pin(5), 5, op + ) self._outputs.append(self._inertia_relief) - self._model_size = Output(rom_data_provider._spec().output_pin(6), 6, op) + self._model_size: Output[float] = Output( + rom_data_provider._spec().output_pin(6), 6, op + ) self._outputs.append(self._model_size) - self._field_coordinates_and_euler_angles = Output( + self._field_coordinates_and_euler_angles: Output[float] = Output( rom_data_provider._spec().output_pin(7), 7, op ) self._outputs.append(self._field_coordinates_and_euler_angles) - self._nod = Output(rom_data_provider._spec().output_pin(8), 8, op) + self._nod: Output = Output(rom_data_provider._spec().output_pin(8), 8, op) self._outputs.append(self._nod) - self._meshed_region = Output(rom_data_provider._spec().output_pin(9), 9, op) + self._meshed_region: Output[MeshedRegion] = Output( + rom_data_provider._spec().output_pin(9), 9, op + ) self._outputs.append(self._meshed_region) - self._phi_ortho = Output(rom_data_provider._spec().output_pin(10), 10, op) + self._phi_ortho: Output[FieldsContainer] = Output( + rom_data_provider._spec().output_pin(10), 10, op + ) self._outputs.append(self._phi_ortho) @property - def rom_matrices(self) -> Output: + def rom_matrices(self) -> Output[FieldsContainer]: r"""Allows to get rom_matrices output of the operator FieldsContainers containing the reduced matrices @@ -613,7 +645,7 @@ def rom_matrices(self) -> Output: return self._rom_matrices @property - def mode_shapes(self) -> Output: + def mode_shapes(self) -> Output[FieldsContainer]: r"""Allows to get mode_shapes output of the operator FieldsContainers containing the mode shapes, which are CST and NOR for the cms method @@ -633,7 +665,7 @@ def mode_shapes(self) -> Output: return self._mode_shapes @property - def lumped_mass(self) -> Output: + def lumped_mass(self) -> Output[FieldsContainer]: r"""Allows to get lumped_mass output of the operator FieldsContainers containing the lumped mass @@ -653,7 +685,7 @@ def lumped_mass(self) -> Output: return self._lumped_mass @property - def model_data(self) -> Output: + def 
model_data(self) -> Output[PropertyField]: r"""Allows to get model_data output of the operator data describing the finite element model @@ -673,7 +705,7 @@ def model_data(self) -> Output: return self._model_data @property - def center_of_mass(self) -> Output: + def center_of_mass(self) -> Output[PropertyField]: r"""Allows to get center_of_mass output of the operator Returns @@ -691,7 +723,7 @@ def center_of_mass(self) -> Output: return self._center_of_mass @property - def inertia_relief(self) -> Output: + def inertia_relief(self) -> Output[Field]: r"""Allows to get inertia_relief output of the operator inertia matrix @@ -711,7 +743,7 @@ def inertia_relief(self) -> Output: return self._inertia_relief @property - def model_size(self) -> Output: + def model_size(self) -> Output[float]: r"""Allows to get model_size output of the operator size of the model @@ -731,7 +763,7 @@ def model_size(self) -> Output: return self._model_size @property - def field_coordinates_and_euler_angles(self) -> Output: + def field_coordinates_and_euler_angles(self) -> Output[float]: r"""Allows to get field_coordinates_and_euler_angles output of the operator coordinates and euler angles of all nodes @@ -771,7 +803,7 @@ def nod(self) -> Output: return self._nod @property - def meshed_region(self) -> Output: + def meshed_region(self) -> Output[MeshedRegion]: r"""Allows to get meshed_region output of the operator expanded meshed region. @@ -791,7 +823,7 @@ def meshed_region(self) -> Output: return self._meshed_region @property - def phi_ortho(self) -> Output: + def phi_ortho(self) -> Output[FieldsContainer]: r"""Allows to get phi_ortho output of the operator Orthonormalized mode shape transformation diff --git a/src/ansys/dpf/core/operators/result/run.py b/src/ansys/dpf/core/operators/result/run.py index 826a4956b3b..eb5134fc14e 100644 --- a/src/ansys/dpf/core/operators/result/run.py +++ b/src/ansys/dpf/core/operators/result/run.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + class run(Operator): r"""Solve in mapdl a dat/inp file and returns a datasources with the rst @@ -245,21 +249,25 @@ class InputsRun(_Inputs): def __init__(self, op: Operator): super().__init__(run._spec().inputs, op) - self._mapdl_exe_path = Input(run._spec().input_pin(0), 0, op, -1) + self._mapdl_exe_path: Input[str] = Input(run._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mapdl_exe_path) - self._working_dir = Input(run._spec().input_pin(1), 1, op, -1) + self._working_dir: Input[str] = Input(run._spec().input_pin(1), 1, op, -1) self._inputs.append(self._working_dir) - self._number_of_processes = Input(run._spec().input_pin(2), 2, op, -1) + self._number_of_processes: Input[int] = Input( + run._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._number_of_processes) - self._number_of_threads = Input(run._spec().input_pin(3), 3, op, -1) + self._number_of_threads: Input[int] = Input(run._spec().input_pin(3), 3, op, -1) self._inputs.append(self._number_of_threads) - self._data_sources = Input(run._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + run._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._server_mode = Input(run._spec().input_pin(5), 5, op, -1) + 
self._server_mode: Input[bool] = Input(run._spec().input_pin(5), 5, op, -1) self._inputs.append(self._server_mode) @property - def mapdl_exe_path(self) -> Input: + def mapdl_exe_path(self) -> Input[str]: r"""Allows to connect mapdl_exe_path input to the operator. Returns @@ -278,7 +286,7 @@ def mapdl_exe_path(self) -> Input: return self._mapdl_exe_path @property - def working_dir(self) -> Input: + def working_dir(self) -> Input[str]: r"""Allows to connect working_dir input to the operator. Returns @@ -297,7 +305,7 @@ def working_dir(self) -> Input: return self._working_dir @property - def number_of_processes(self) -> Input: + def number_of_processes(self) -> Input[int]: r"""Allows to connect number_of_processes input to the operator. Set the number of MPI processes used for resolution (default is 2) @@ -318,7 +326,7 @@ def number_of_processes(self) -> Input: return self._number_of_processes @property - def number_of_threads(self) -> Input: + def number_of_threads(self) -> Input[int]: r"""Allows to connect number_of_threads input to the operator. Set the number of threads used for resolution (default is 1) @@ -339,7 +347,7 @@ def number_of_threads(self) -> Input: return self._number_of_threads @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. data sources containing the input file. @@ -360,7 +368,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def server_mode(self) -> Input: + def server_mode(self) -> Input[bool]: r"""Allows to connect server_mode input to the operator. used when a user includes commands in the input file allowing to launch DPF server inside MAPDL to interact with MAPDL using DPF client API @@ -397,15 +405,17 @@ class OutputsRun(_Outputs): def __init__(self, op: Operator): super().__init__(run._spec().outputs, op) - self._data_sources = Output(run._spec().output_pin(0), 0, op) + self._data_sources: Output[DataSources] = Output( + run._spec().output_pin(0), 0, op + ) self._outputs.append(self._data_sources) - self._ip = Output(run._spec().output_pin(1), 1, op) + self._ip: Output[str] = Output(run._spec().output_pin(1), 1, op) self._outputs.append(self._ip) - self._port = Output(run._spec().output_pin(2), 2, op) + self._port: Output[str] = Output(run._spec().output_pin(2), 2, op) self._outputs.append(self._port) @property - def data_sources(self) -> Output: + def data_sources(self) -> Output[DataSources]: r"""Allows to get data_sources output of the operator returns the data source if the server_mode pin is not set to yes @@ -425,7 +435,7 @@ def data_sources(self) -> Output: return self._data_sources @property - def ip(self) -> Output: + def ip(self) -> Output[str]: r"""Allows to get ip output of the operator returns the Ip if the server_mode pin is set to yes @@ -445,7 +455,7 @@ def ip(self) -> Output: return self._ip @property - def port(self) -> Output: + def port(self) -> Output[str]: r"""Allows to get port output of the operator returns a port when the server mode pin is set to yes diff --git a/src/ansys/dpf/core/operators/result/smisc.py b/src/ansys/dpf/core/operators/result/smisc.py index 248250e4eae..d9dc82bcfa0 100644 --- a/src/ansys/dpf/core/operators/result/smisc.py +++ b/src/ansys/dpf/core/operators/result/smisc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.core import errors @@ -15,6 +16,16 @@ from ansys.dpf.core.config import Config from 
ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class smisc(Operator): r"""Read/compute elemental summable miscellaneous data by calling the @@ -362,35 +373,55 @@ class InputsSmisc(_Inputs): def __init__(self, op: Operator): super().__init__(smisc._spec().inputs, op) - self._time_scoping = Input(smisc._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + smisc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(smisc._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + smisc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(smisc._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + smisc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(smisc._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + smisc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(smisc._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + smisc._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(smisc._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + smisc._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(smisc._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + smisc._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._item_index = Input(smisc._spec().input_pin(10), 10, op, -1) + self._item_index: Input[int] = Input(smisc._spec().input_pin(10), 10, op, -1) self._inputs.append(self._item_index) - self._num_components = Input(smisc._spec().input_pin(11), 11, op, -1) + self._num_components: Input[int] = Input( + smisc._spec().input_pin(11), 11, op, -1 + ) self._inputs.append(self._num_components) - self._read_cyclic = Input(smisc._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input(smisc._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(smisc._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( + smisc._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input(smisc._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( + smisc._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(smisc._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input(smisc._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> 
Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -411,7 +442,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -432,7 +463,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -453,7 +484,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -474,7 +505,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -495,7 +526,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -516,7 +547,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -537,7 +568,7 @@ def mesh(self) -> Input: return self._mesh @property - def item_index(self) -> Input: + def item_index(self) -> Input[int]: r"""Allows to connect item_index input to the operator. Index of requested item. @@ -558,7 +589,7 @@ def item_index(self) -> Input: return self._item_index @property - def num_components(self) -> Input: + def num_components(self) -> Input[int]: r"""Allows to connect num_components input to the operator. 
Number of components for the requested item. @@ -579,7 +610,7 @@ def num_components(self) -> Input: return self._num_components @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -600,7 +631,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -621,7 +652,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -642,7 +673,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -677,11 +708,13 @@ class OutputsSmisc(_Outputs): def __init__(self, op: Operator): super().__init__(smisc._spec().outputs, op) - self._fields_container = Output(smisc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + smisc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/specific_heat.py b/src/ansys/dpf/core/operators/result/specific_heat.py index c96ee334294..88be7a37270 100644 --- a/src/ansys/dpf/core/operators/result/specific_heat.py +++ b/src/ansys/dpf/core/operators/result/specific_heat.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class specific_heat(Operator): r"""Read Specific Heat by calling the readers defined by the datasources. 
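A sketch of how the numeric pin annotations on InputsSmisc above read at a call site. A running DPF server and a cyclic MAPDL result file are assumed; the path and item index are placeholders:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    op = ops.result.smisc()
    op.inputs.data_sources.connect(dpf.DataSources(r"cyclic_model.rst"))
    op.inputs.item_index.connect(3)    # Input[int]: index of the requested SMISC item
    op.inputs.read_cyclic.connect(2)   # Input[int]: 2 requests cyclic expansion
    op.inputs.phi.connect(30.0)        # Input[float]: expansion angle phi in degrees
    fields = op.outputs.fields_container()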
@@ -265,25 +276,41 @@ class InputsSpecificHeat(_Inputs): def __init__(self, op: Operator): super().__init__(specific_heat._spec().inputs, op) - self._time_scoping = Input(specific_heat._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + specific_heat._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(specific_heat._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + specific_heat._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(specific_heat._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + specific_heat._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(specific_heat._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + specific_heat._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(specific_heat._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + specific_heat._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(specific_heat._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + specific_heat._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(specific_heat._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + specific_heat._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(specific_heat._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + specific_heat._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. 
Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsSpecificHeat(_Outputs): def __init__(self, op: Operator): super().__init__(specific_heat._spec().outputs, op) - self._fields_container = Output(specific_heat._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + specific_heat._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/spectrum_data.py b/src/ansys/dpf/core/operators/result/spectrum_data.py index 798d75fcbb3..997136b224b 100644 --- a/src/ansys/dpf/core/operators/result/spectrum_data.py +++ b/src/ansys/dpf/core/operators/result/spectrum_data.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class spectrum_data(Operator): r"""Read spectral data from the result files contained in the streams or @@ -204,13 +210,17 @@ class InputsSpectrumData(_Inputs): def __init__(self, op: Operator): super().__init__(spectrum_data._spec().inputs, op) - self._streams = Input(spectrum_data._spec().input_pin(3), 3, op, -1) + self._streams: Input[StreamsContainer] = Input( + spectrum_data._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams) - self._data_sources = Input(spectrum_data._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + spectrum_data._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. Result file container allowed to be kept open to cache data. @@ -231,7 +241,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Result file path container, used if no streams are set. 
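The qualifiers pins of the fluid-oriented readers in this change (rms_static_pressure, rms_temperature, rms_velocity, specific_heat) are annotated as Input[dict], documenting that a label-space dictionary is the expected payload. A sketch under that assumption; the path and the zone/phase ids are invented:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    op = ops.result.specific_heat()
    op.inputs.data_sources.connect(dpf.DataSources(r"fluid_results.flprj"))
    # Input[dict]: restrict the read to one combination of zone and phase labels
    op.inputs.qualifiers1.connect({"zone": 5, "phase": 1})
    cp = op.outputs.fields_container()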
@@ -271,21 +281,33 @@ class OutputsSpectrumData(_Outputs): def __init__(self, op: Operator): super().__init__(spectrum_data._spec().outputs, op) - self._participation_factors = Output(spectrum_data._spec().output_pin(0), 0, op) + self._participation_factors: Output[FieldsContainer] = Output( + spectrum_data._spec().output_pin(0), 0, op + ) self._outputs.append(self._participation_factors) - self._mode_coefficients = Output(spectrum_data._spec().output_pin(1), 1, op) + self._mode_coefficients: Output[FieldsContainer] = Output( + spectrum_data._spec().output_pin(1), 1, op + ) self._outputs.append(self._mode_coefficients) - self._damping_ratios = Output(spectrum_data._spec().output_pin(2), 2, op) + self._damping_ratios: Output[FieldsContainer] = Output( + spectrum_data._spec().output_pin(2), 2, op + ) self._outputs.append(self._damping_ratios) - self._global_damping = Output(spectrum_data._spec().output_pin(3), 3, op) + self._global_damping: Output[FieldsContainer] = Output( + spectrum_data._spec().output_pin(3), 3, op + ) self._outputs.append(self._global_damping) - self._missing_mass = Output(spectrum_data._spec().output_pin(4), 4, op) + self._missing_mass: Output[FieldsContainer] = Output( + spectrum_data._spec().output_pin(4), 4, op + ) self._outputs.append(self._missing_mass) - self._rigid_response = Output(spectrum_data._spec().output_pin(5), 5, op) + self._rigid_response: Output[FieldsContainer] = Output( + spectrum_data._spec().output_pin(5), 5, op + ) self._outputs.append(self._rigid_response) @property - def participation_factors(self) -> Output: + def participation_factors(self) -> Output[FieldsContainer]: r"""Allows to get participation_factors output of the operator Fields container holding participation factors. @@ -305,7 +327,7 @@ def participation_factors(self) -> Output: return self._participation_factors @property - def mode_coefficients(self) -> Output: + def mode_coefficients(self) -> Output[FieldsContainer]: r"""Allows to get mode_coefficients output of the operator Fields container holding mode coefficients (PRS File). @@ -325,7 +347,7 @@ def mode_coefficients(self) -> Output: return self._mode_coefficients @property - def damping_ratios(self) -> Output: + def damping_ratios(self) -> Output[FieldsContainer]: r"""Allows to get damping_ratios output of the operator Fields container holding damping ratios (PRS File). @@ -345,7 +367,7 @@ def damping_ratios(self) -> Output: return self._damping_ratios @property - def global_damping(self) -> Output: + def global_damping(self) -> Output[FieldsContainer]: r"""Allows to get global_damping output of the operator Fields container holding for each spectrum: Global Damping Ratio, Damping Stiffness Coefficient & Damping Mass Coefficient (PRS File). @@ -365,7 +387,7 @@ def global_damping(self) -> Output: return self._global_damping @property - def missing_mass(self) -> Output: + def missing_mass(self) -> Output[FieldsContainer]: r"""Allows to get missing_mass output of the operator Fields container holding for each spectrum: Missing Mass Mode (0: None, 1: Active), Missing Mass Effect ZPA (PRS File). @@ -385,7 +407,7 @@ def missing_mass(self) -> Output: return self._missing_mass @property - def rigid_response(self) -> Output: + def rigid_response(self) -> Output[FieldsContainer]: r"""Allows to get rigid_response output of the operator Fields container holding for each spectrum: Rigid Response Mode (0: None, 1: Gupta, 2: Lindley), Freq Begin (Gupta) / ZPA (Lindley), Freq End (Gupta) (PRS File). 
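Reviewer note (usage sketch, not part of the patch): with the pins above parametrized as Input[...] and Output[...], static type checkers and IDEs can now surface what spectrum_data accepts and returns. A minimal sketch of the intended effect, assuming a placeholder result-file path:

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

# placeholder path; any result file readable by the configured readers works
data_sources = dpf.DataSources(r"model.rst")

op = ops.result.spectrum_data()
# data_sources pin is declared as Input[DataSources], so connecting
# anything other than a DataSources is now flagged by a type checker
op.inputs.data_sources.connect(data_sources)

# participation_factors pin is declared as Output[FieldsContainer],
# so tooling can propose FieldsContainer members on the evaluated result
participation_factors = op.outputs.participation_factors()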
diff --git a/src/ansys/dpf/core/operators/result/state_variable.py b/src/ansys/dpf/core/operators/result/state_variable.py index 10d136da4f5..fd672ac92a9 100644 --- a/src/ansys/dpf/core/operators/result/state_variable.py +++ b/src/ansys/dpf/core/operators/result/state_variable.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class state_variable(Operator): r"""Read/compute elemental state variable by calling the readers defined by @@ -571,39 +582,61 @@ class InputsStateVariable(_Inputs): def __init__(self, op: Operator): super().__init__(state_variable._spec().inputs, op) - self._time_scoping = Input(state_variable._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + state_variable._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(state_variable._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + state_variable._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(state_variable._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + state_variable._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(state_variable._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + state_variable._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(state_variable._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + state_variable._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( state_variable._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(state_variable._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + state_variable._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(state_variable._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + state_variable._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._item_index = Input(state_variable._spec().input_pin(10), 10, op, -1) + self._item_index: Input[int] = Input( + state_variable._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._item_index) - self._read_beams = Input(state_variable._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + state_variable._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = 
Input(state_variable._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + state_variable._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(state_variable._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + state_variable._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( state_variable._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -624,7 +657,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -645,7 +678,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -666,7 +699,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -687,7 +720,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -708,7 +741,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. 
@@ -729,7 +762,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -750,7 +783,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -771,7 +804,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def item_index(self) -> Input: + def item_index(self) -> Input[int]: r"""Allows to connect item_index input to the operator. Index of requested item. @@ -792,7 +825,7 @@ def item_index(self) -> Input: return self._item_index @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -813,7 +846,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -834,7 +867,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -855,7 +888,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -890,11 +923,13 @@ class OutputsStateVariable(_Outputs): def __init__(self, op: Operator): super().__init__(state_variable._spec().outputs, op) - self._fields_container = Output(state_variable._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + state_variable._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/static_pressure.py b/src/ansys/dpf/core/operators/result/static_pressure.py index 46778a56af1..af4cf86aaa7 100644 --- a/src/ansys/dpf/core/operators/result/static_pressure.py +++ b/src/ansys/dpf/core/operators/result/static_pressure.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class static_pressure(Operator): r"""Read Static Pressure by calling the readers defined by the datasources. @@ -265,25 +276,41 @@ class InputsStaticPressure(_Inputs): def __init__(self, op: Operator): super().__init__(static_pressure._spec().inputs, op) - self._time_scoping = Input(static_pressure._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + static_pressure._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(static_pressure._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + static_pressure._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(static_pressure._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + static_pressure._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(static_pressure._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + static_pressure._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(static_pressure._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + static_pressure._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(static_pressure._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + static_pressure._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(static_pressure._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + static_pressure._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(static_pressure._spec().input_pin(1001), 1001, op, 1) 
+ self._qualifiers2: Input[dict] = Input( + static_pressure._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsStaticPressure(_Outputs): def __init__(self, op: Operator): super().__init__(static_pressure._spec().outputs, op) - self._fields_container = Output(static_pressure._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + static_pressure._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stiffness_matrix_energy.py b/src/ansys/dpf/core/operators/result/stiffness_matrix_energy.py index 5f9b248b846..0d280cf15ee 100644 --- a/src/ansys/dpf/core/operators/result/stiffness_matrix_energy.py +++ b/src/ansys/dpf/core/operators/result/stiffness_matrix_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stiffness_matrix_energy(Operator): r"""Read/compute element energy associated with the stiffness matrix by @@ -315,49 +326,53 @@ class InputsStiffnessMatrixEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(stiffness_matrix_energy._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( stiffness_matrix_energy._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( stiffness_matrix_energy._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( stiffness_matrix_energy._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( stiffness_matrix_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( stiffness_matrix_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( stiffness_matrix_energy._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stiffness_matrix_energy._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stiffness_matrix_energy._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( stiffness_matrix_energy._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - 
self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( stiffness_matrix_energy._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input( + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( stiffness_matrix_energy._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(stiffness_matrix_energy._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input( + stiffness_matrix_energy._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -378,7 +393,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -399,7 +414,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -420,7 +435,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -441,7 +456,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -462,7 +477,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. 
@@ -483,7 +498,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -504,7 +519,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -525,7 +540,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -546,7 +561,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -567,7 +582,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -602,13 +617,13 @@ class OutputsStiffnessMatrixEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(stiffness_matrix_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( stiffness_matrix_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/strain_eqv_as_mechanical.py b/src/ansys/dpf/core/operators/result/strain_eqv_as_mechanical.py index 157e5db66a7..4ea9964637f 100644 --- a/src/ansys/dpf/core/operators/result/strain_eqv_as_mechanical.py +++ b/src/ansys/dpf/core/operators/result/strain_eqv_as_mechanical.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class strain_eqv_as_mechanical(Operator): r"""Computes the equivalent (Von Mises) elastic strains and averages it to @@ -286,43 +296,45 @@ class InputsStrainEqvAsMechanical(_Inputs): def __init__(self, op: Operator): super().__init__(strain_eqv_as_mechanical._spec().inputs, op) - 
self._time_scoping = Input( + self._time_scoping: Input[Scoping] = Input( strain_eqv_as_mechanical._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping | ScopingsContainer] = Input( strain_eqv_as_mechanical._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( strain_eqv_as_mechanical._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( strain_eqv_as_mechanical._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._mesh = Input(strain_eqv_as_mechanical._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + strain_eqv_as_mechanical._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( strain_eqv_as_mechanical._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._poisson_ratio = Input( + self._poisson_ratio: Input[int | float] = Input( strain_eqv_as_mechanical._spec().input_pin(13), 13, op, -1 ) self._inputs.append(self._poisson_ratio) - self._read_cyclic = Input( + self._read_cyclic: Input[bool] = Input( strain_eqv_as_mechanical._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._average_across_bodies = Input( + self._average_across_bodies: Input[bool] = Input( strain_eqv_as_mechanical._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._average_across_bodies) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids use scoping with TimeFreq_steps location) required in output. @@ -343,7 +355,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. @@ -364,7 +376,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data. @@ -385,7 +397,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container. @@ -406,7 +418,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the results file. @@ -427,7 +439,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. average the elemental nodal result to the requested location (default is nodal). 
@@ -448,7 +460,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def poisson_ratio(self) -> Input: + def poisson_ratio(self) -> Input[int | float]: r"""Allows to connect poisson_ratio input to the operator. Poisson ratio to be used in equivalent strain calculation. @@ -469,7 +481,7 @@ def poisson_ratio(self) -> Input: return self._poisson_ratio @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[bool]: r"""Allows to connect read_cyclic input to the operator. if true, cyclic expansion is done. If false, it's ignored. @@ -490,7 +502,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def average_across_bodies(self) -> Input: + def average_across_bodies(self) -> Input[bool]: r"""Allows to connect average_across_bodies input to the operator. for multibody simulations, the stresses are averaged across bodies if true or not if false (default). @@ -526,17 +538,17 @@ class OutputsStrainEqvAsMechanical(_Outputs): def __init__(self, op: Operator): super().__init__(strain_eqv_as_mechanical._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( strain_eqv_as_mechanical._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) - self._meshes_container = Output( + self._meshes_container: Output[MeshesContainer] = Output( strain_eqv_as_mechanical._spec().output_pin(1), 1, op ) self._outputs.append(self._meshes_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns @@ -554,7 +566,7 @@ def fields_container(self) -> Output: return self._fields_container @property - def meshes_container(self) -> Output: + def meshes_container(self) -> Output[MeshesContainer]: r"""Allows to get meshes_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/strain_eqv_as_mechanical_workflow.py b/src/ansys/dpf/core/operators/result/strain_eqv_as_mechanical_workflow.py index 63ee4e9c864..903d0c0e6bf 100644 --- a/src/ansys/dpf/core/operators/result/strain_eqv_as_mechanical_workflow.py +++ b/src/ansys/dpf/core/operators/result/strain_eqv_as_mechanical_workflow.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.workflow import Workflow + class strain_eqv_as_mechanical_workflow(Operator): r"""Generates a workflow that computes the equivalent (Von Mises) elastic @@ -268,41 +278,41 @@ class InputsStrainEqvAsMechanicalWorkflow(_Inputs): def __init__(self, op: Operator): super().__init__(strain_eqv_as_mechanical_workflow._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping] = Input( strain_eqv_as_mechanical_workflow._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping | ScopingsContainer] = 
Input( strain_eqv_as_mechanical_workflow._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( strain_eqv_as_mechanical_workflow._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( strain_eqv_as_mechanical_workflow._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( strain_eqv_as_mechanical_workflow._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( strain_eqv_as_mechanical_workflow._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[bool] = Input( strain_eqv_as_mechanical_workflow._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._average_across_bodies = Input( + self._average_across_bodies: Input[bool] = Input( strain_eqv_as_mechanical_workflow._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._average_across_bodies) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids use scoping with TimeFreq_steps location) required in output. @@ -323,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. @@ -344,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data. @@ -365,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container. @@ -386,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the results file. @@ -407,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. average the elemental nodal result to the requested location (default is nodal). @@ -428,7 +438,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[bool]: r"""Allows to connect read_cyclic input to the operator. if true, cyclic expansion is done. If false, it's ignored.. 
@@ -449,7 +459,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def average_across_bodies(self) -> Input: + def average_across_bodies(self) -> Input[bool]: r"""Allows to connect average_across_bodies input to the operator. for multibody simulations, the stresses are averaged across bodies if true or not if false (default). @@ -484,13 +494,13 @@ class OutputsStrainEqvAsMechanicalWorkflow(_Outputs): def __init__(self, op: Operator): super().__init__(strain_eqv_as_mechanical_workflow._spec().outputs, op) - self._workflow = Output( + self._workflow: Output[Workflow] = Output( strain_eqv_as_mechanical_workflow._spec().output_pin(0), 0, op ) self._outputs.append(self._workflow) @property - def workflow(self) -> Output: + def workflow(self) -> Output[Workflow]: r"""Allows to get workflow output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress.py b/src/ansys/dpf/core/operators/result/stress.py index baf37abd382..696ad871ef5 100644 --- a/src/ansys/dpf/core/operators/result/stress.py +++ b/src/ansys/dpf/core/operators/result/stress.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress(Operator): r"""Read/compute element nodal component stresses by calling the readers @@ -619,41 +630,65 @@ class InputsStress(_Inputs): def __init__(self, op: Operator): super().__init__(stress._spec().inputs, op) - self._time_scoping = Input(stress._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + stress._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + stress._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + stress._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress._spec().input_pin(7), 7, op, -1 + ) 
self._inputs.append(self._mesh) - self._requested_location = Input(stress._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + stress._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input(stress._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(stress._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( + stress._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input(stress._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( + stress._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(stress._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input(stress._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) - self._read_beams = Input(stress._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input(stress._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) - self._split_shells = Input(stress._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + stress._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(stress._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input(stress._spec().input_pin(27), 27, op, -1) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input(stress._spec().input_pin(28), 28, op, -1) + self._extend_to_mid_nodes: Input[bool] = Input( + stress._spec().input_pin(28), 28, op, -1 + ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -674,7 +709,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -695,7 +730,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Fields container already allocated modified inplace @@ -716,7 +751,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -737,7 +772,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -758,7 +793,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -779,7 +814,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -800,7 +835,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -821,7 +856,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -842,7 +877,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -863,7 +898,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -884,7 +919,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -905,7 +940,7 @@ def phi(self) -> Input: return self._phi @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -926,7 +961,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -947,7 +982,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -968,7 +1003,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. Default: True @@ -1003,11 +1038,13 @@ class OutputsStress(_Outputs): def __init__(self, op: Operator): super().__init__(stress._spec().outputs, op) - self._fields_container = Output(stress._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_X.py b/src/ansys/dpf/core/operators/result/stress_X.py index 053dbe8a33c..391e6c018da 100644 --- a/src/ansys/dpf/core/operators/result/stress_X.py +++ b/src/ansys/dpf/core/operators/result/stress_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_X(Operator): r"""Read/compute element nodal component stresses XX normal component (00 @@ -303,29 +314,49 @@ class InputsStressX(_Inputs): def __init__(self, op: Operator): super().__init__(stress_X._spec().inputs, op) - self._time_scoping = Input(stress_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = 
Input(stress_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + stress_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_X._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + stress_X._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_X._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + stress_X._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(stress_X._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + stress_X._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_X._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_X._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -346,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -367,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
FieldsContainer already allocated modified inplace @@ -388,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -409,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -430,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -451,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -472,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -493,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -514,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
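# --- Hedged usage sketch (not part of the patch) ----------------------------
# Illustrates what the new Input[...] annotations on InputsStressX give a user:
# an IDE or type checker can now see that time_scoping accepts a Scoping, an
# int, a float or a Field, while data_sources only accepts a DataSources.
# "model.rst" is a placeholder path, not a file shipped with this change.
from ansys.dpf import core as dpf

op = dpf.operators.result.stress_X()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # Input[DataSources]
op.inputs.time_scoping.connect(1)               # int is one alternative of Input[Scoping | int | float | Field]
op.inputs.requested_location.connect("Nodal")   # Input[str]
fields = op.outputs.fields_container()          # declared as Output[FieldsContainer]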
elemental nodal beam results are read if this pin is set to true (default is false) @@ -549,11 +580,13 @@ class OutputsStressX(_Outputs): def __init__(self, op: Operator): super().__init__(stress_X._spec().outputs, op) - self._fields_container = Output(stress_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_XY.py b/src/ansys/dpf/core/operators/result/stress_XY.py index d4edd74ae33..faee1b73a38 100644 --- a/src/ansys/dpf/core/operators/result/stress_XY.py +++ b/src/ansys/dpf/core/operators/result/stress_XY.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_XY(Operator): r"""Read/compute element nodal component stresses XY shear component (01 @@ -303,29 +314,49 @@ class InputsStressXy(_Inputs): def __init__(self, op: Operator): super().__init__(stress_XY._spec().inputs, op) - self._time_scoping = Input(stress_XY._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_XY._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_XY._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_XY._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_XY._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + stress_XY._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_XY._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + stress_XY._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_XY._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_XY._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_XY._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + stress_XY._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_XY._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_XY._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(stress_XY._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + 
stress_XY._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_XY._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_XY._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_XY._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_XY._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -346,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -367,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -388,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -409,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -430,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -451,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
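# --- Hedged type-checking sketch (not part of the patch) --------------------
# With Input generic over T and connect() typed as (inpt: T), a static checker
# such as mypy or Pyright should flag obviously wrong connections on
# InputsStressXy. The diagnostics described below are assumptions about checker
# behaviour, not captured tool output.
from ansys.dpf import core as dpf

op = dpf.operators.result.stress_XY()
op.inputs.read_beams.connect(True)      # OK: read_beams is Input[bool]
op.inputs.read_cyclic.connect(1)        # OK: read_cyclic is Input[int] (1 = read cyclic sector)
# op.inputs.read_cyclic.connect("all")  # a checker should reject this: str is not part of Input[int]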
prevents from reading the mesh in the result files @@ -472,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -493,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -514,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -549,11 +580,13 @@ class OutputsStressXy(_Outputs): def __init__(self, op: Operator): super().__init__(stress_XY._spec().outputs, op) - self._fields_container = Output(stress_XY._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_XY._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_XZ.py b/src/ansys/dpf/core/operators/result/stress_XZ.py index 7ae55d250b7..0f4a6ba912d 100644 --- a/src/ansys/dpf/core/operators/result/stress_XZ.py +++ b/src/ansys/dpf/core/operators/result/stress_XZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_XZ(Operator): r"""Read/compute element nodal component stresses XZ shear component (02 @@ -303,29 +314,49 @@ class InputsStressXz(_Inputs): def __init__(self, op: Operator): super().__init__(stress_XZ._spec().inputs, op) - self._time_scoping = Input(stress_XZ._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_XZ._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_XZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_XZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_XZ._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + stress_XZ._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_XZ._spec().input_pin(3), 3, op, -1) + 
self._streams_container: Input[StreamsContainer] = Input( + stress_XZ._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_XZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_XZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_XZ._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + stress_XZ._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_XZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_XZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(stress_XZ._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + stress_XZ._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_XZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_XZ._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_XZ._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_XZ._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -346,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -367,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -388,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -409,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -430,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -451,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -472,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -493,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -514,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
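# --- Hedged sketch of the import pattern used above (not part of the patch) --
# The generated modules only need Field, Scoping, DataSources, etc. for the new
# annotations, so they are imported under typing.TYPE_CHECKING while
# `from __future__ import annotations` keeps the hints as strings at runtime.
# A minimal standalone reproduction of that pattern; first_result() is an
# illustrative helper, not part of the library.
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Imported for type checkers only: no runtime cost, no import cycle.
    from ansys.dpf.core.fields_container import FieldsContainer


def first_result(container: FieldsContainer):
    """Return the first field of an already evaluated fields container."""
    return container[0]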
elemental nodal beam results are read if this pin is set to true (default is false) @@ -549,11 +580,13 @@ class OutputsStressXz(_Outputs): def __init__(self, op: Operator): super().__init__(stress_XZ._spec().outputs, op) - self._fields_container = Output(stress_XZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_XZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_Y.py b/src/ansys/dpf/core/operators/result/stress_Y.py index cd80e0e8846..3f0c07f2e02 100644 --- a/src/ansys/dpf/core/operators/result/stress_Y.py +++ b/src/ansys/dpf/core/operators/result/stress_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_Y(Operator): r"""Read/compute element nodal component stresses YY normal component (11 @@ -303,29 +314,49 @@ class InputsStressY(_Inputs): def __init__(self, op: Operator): super().__init__(stress_Y._spec().inputs, op) - self._time_scoping = Input(stress_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + stress_Y._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_Y._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + stress_Y._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_Y._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + stress_Y._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(stress_Y._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + stress_Y._spec().input_pin(9), 
9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_Y._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_Y._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -346,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -367,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -388,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -409,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -430,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -451,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
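# --- Hedged sketch: consuming the typed output (not part of the patch) ------
# OutputsStressY.fields_container is now annotated Output[FieldsContainer], so
# the object returned by calling it should be resolved to a FieldsContainer by
# editors. "model.rst" is a placeholder result file.
from ansys.dpf import core as dpf

op = dpf.operators.result.stress_Y()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))
fc = op.outputs.fields_container()   # FieldsContainer methods are now discoverable here
print(len(fc), fc[0].location)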
prevents from reading the mesh in the result files @@ -472,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -493,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -514,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -549,11 +580,13 @@ class OutputsStressY(_Outputs): def __init__(self, op: Operator): super().__init__(stress_Y._spec().outputs, op) - self._fields_container = Output(stress_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_YZ.py b/src/ansys/dpf/core/operators/result/stress_YZ.py index 2fdce30d31a..a8828a946da 100644 --- a/src/ansys/dpf/core/operators/result/stress_YZ.py +++ b/src/ansys/dpf/core/operators/result/stress_YZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_YZ(Operator): r"""Read/compute element nodal component stresses YZ shear component (12 @@ -303,29 +314,49 @@ class InputsStressYz(_Inputs): def __init__(self, op: Operator): super().__init__(stress_YZ._spec().inputs, op) - self._time_scoping = Input(stress_YZ._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_YZ._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_YZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_YZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_YZ._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + stress_YZ._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_YZ._spec().input_pin(3), 3, op, -1) + 
self._streams_container: Input[StreamsContainer] = Input( + stress_YZ._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_YZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_YZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_YZ._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + stress_YZ._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_YZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_YZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(stress_YZ._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + stress_YZ._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_YZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_YZ._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_YZ._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_YZ._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -346,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -367,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -388,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -409,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -430,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -451,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -472,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -493,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -514,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
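# --- Hedged sketch: the mesh_scoping union type (not part of the patch) -----
# mesh_scoping on InputsStressYz is annotated Input[ScopingsContainer | Scoping],
# matching the pin documentation above: a plain Scoping selects entities, while
# a ScopingsContainer splits the result fields container into domains.
# "model.rst" is a placeholder path.
from ansys.dpf import core as dpf

op = dpf.operators.result.stress_YZ()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))
nodes = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)
op.inputs.mesh_scoping.connect(nodes)  # Scoping is one accepted alternative of the union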
elemental nodal beam results are read if this pin is set to true (default is false) @@ -549,11 +580,13 @@ class OutputsStressYz(_Outputs): def __init__(self, op: Operator): super().__init__(stress_YZ._spec().outputs, op) - self._fields_container = Output(stress_YZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_YZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_Z.py b/src/ansys/dpf/core/operators/result/stress_Z.py index 24a5734639e..f0097a7173a 100644 --- a/src/ansys/dpf/core/operators/result/stress_Z.py +++ b/src/ansys/dpf/core/operators/result/stress_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_Z(Operator): r"""Read/compute element nodal component stresses ZZ normal component (22 @@ -303,29 +314,49 @@ class InputsStressZ(_Inputs): def __init__(self, op: Operator): super().__init__(stress_Z._spec().inputs, op) - self._time_scoping = Input(stress_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + stress_Z._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_Z._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + stress_Z._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_Z._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + stress_Z._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(stress_Z._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + stress_Z._spec().input_pin(9), 
9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_Z._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_Z._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -346,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -367,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -388,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -409,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -430,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -451,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
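# --- Hedged sketch: connecting a mesh to the typed pin (not part of the patch)
# The mesh pin of InputsStressZ is annotated Input[MeshedRegion | MeshesContainer];
# passing the meshed region of a Model avoids re-reading the mesh from the
# result files, as the pin documentation above describes. "model.rst" is a
# placeholder path.
from ansys.dpf import core as dpf

model = dpf.Model("model.rst")
op = dpf.operators.result.stress_Z()
op.inputs.data_sources.connect(model.metadata.data_sources)
op.inputs.mesh.connect(model.metadata.meshed_region)  # MeshedRegion alternative of the union
fc = op.outputs.fields_container()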
prevents from reading the mesh in the result files @@ -472,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -493,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -514,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -549,11 +580,13 @@ class OutputsStressZ(_Outputs): def __init__(self, op: Operator): super().__init__(stress_Z._spec().outputs, op) - self._fields_container = Output(stress_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_eqv_as_mechanical.py b/src/ansys/dpf/core/operators/result/stress_eqv_as_mechanical.py index 7ba99c6d40f..02c61bf1e79 100644 --- a/src/ansys/dpf/core/operators/result/stress_eqv_as_mechanical.py +++ b/src/ansys/dpf/core/operators/result/stress_eqv_as_mechanical.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_eqv_as_mechanical(Operator): r"""Computes the equivalent (Von Mises) stresses and averages it to the @@ -270,39 +280,41 @@ class InputsStressEqvAsMechanical(_Inputs): def __init__(self, op: Operator): super().__init__(stress_eqv_as_mechanical._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping] = Input( stress_eqv_as_mechanical._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping | ScopingsContainer] = Input( stress_eqv_as_mechanical._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( stress_eqv_as_mechanical._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( stress_eqv_as_mechanical._spec().input_pin(4), 4, op, -1 ) 
self._inputs.append(self._data_sources) - self._mesh = Input(stress_eqv_as_mechanical._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_eqv_as_mechanical._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( stress_eqv_as_mechanical._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[bool] = Input( stress_eqv_as_mechanical._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._average_across_bodies = Input( + self._average_across_bodies: Input[bool] = Input( stress_eqv_as_mechanical._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._average_across_bodies) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids use scoping with TimeFreq_steps location) required in output. @@ -323,7 +335,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. @@ -344,7 +356,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data. @@ -365,7 +377,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container. @@ -386,7 +398,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the results file. @@ -407,7 +419,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. average the elemental nodal result to the requested location (default is nodal). @@ -428,7 +440,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[bool]: r"""Allows to connect read_cyclic input to the operator. if true, cyclic expansion is done. If false, it's ignored.. @@ -449,7 +461,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def average_across_bodies(self) -> Input: + def average_across_bodies(self) -> Input[bool]: r"""Allows to connect average_across_bodies input to the operator. for multibody simulations, the stresses are averaged across bodies if true or not if false (default). 
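# --- Hedged sketch: the two typed outputs of stress_eqv_as_mechanical -------
# (not part of the patch). This operator now declares Output[FieldsContainer]
# and Output[MeshesContainer]; note also that its read_cyclic pin is typed
# Input[bool], unlike the component-stress operators above where the pin of the
# same name carries an int code. "model.rst" is a placeholder path.
from ansys.dpf import core as dpf

op = dpf.operators.result.stress_eqv_as_mechanical()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))
op.inputs.read_cyclic.connect(True)        # Input[bool] on this operator
vm = op.outputs.fields_container()         # Output[FieldsContainer]
meshes = op.outputs.meshes_container()     # Output[MeshesContainer]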
@@ -485,17 +497,17 @@ class OutputsStressEqvAsMechanical(_Outputs): def __init__(self, op: Operator): super().__init__(stress_eqv_as_mechanical._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( stress_eqv_as_mechanical._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) - self._meshes_container = Output( + self._meshes_container: Output[MeshesContainer] = Output( stress_eqv_as_mechanical._spec().output_pin(1), 1, op ) self._outputs.append(self._meshes_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns @@ -513,7 +525,7 @@ def fields_container(self) -> Output: return self._fields_container @property - def meshes_container(self) -> Output: + def meshes_container(self) -> Output[MeshesContainer]: r"""Allows to get meshes_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_eqv_as_mechanical_workflow.py b/src/ansys/dpf/core/operators/result/stress_eqv_as_mechanical_workflow.py index 41a5e25edde..858452850d2 100644 --- a/src/ansys/dpf/core/operators/result/stress_eqv_as_mechanical_workflow.py +++ b/src/ansys/dpf/core/operators/result/stress_eqv_as_mechanical_workflow.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,15 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.workflow import Workflow + class stress_eqv_as_mechanical_workflow(Operator): r"""Generates a workflow that computes the equivalent (Von Mises) stresses @@ -266,41 +276,41 @@ class InputsStressEqvAsMechanicalWorkflow(_Inputs): def __init__(self, op: Operator): super().__init__(stress_eqv_as_mechanical_workflow._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping] = Input( stress_eqv_as_mechanical_workflow._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping | ScopingsContainer] = Input( stress_eqv_as_mechanical_workflow._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( stress_eqv_as_mechanical_workflow._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( stress_eqv_as_mechanical_workflow._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( stress_eqv_as_mechanical_workflow._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( stress_eqv_as_mechanical_workflow._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + 
self._read_cyclic: Input[bool] = Input( stress_eqv_as_mechanical_workflow._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._average_across_bodies = Input( + self._average_across_bodies: Input[bool] = Input( stress_eqv_as_mechanical_workflow._spec().input_pin(200), 200, op, -1 ) self._inputs.append(self._average_across_bodies) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids use scoping with TimeFreq_steps location) required in output. @@ -321,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. @@ -342,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data. @@ -363,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container. @@ -384,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the results file. @@ -405,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. average the elemental nodal result to the requested location (default is nodal). @@ -426,7 +436,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[bool]: r"""Allows to connect read_cyclic input to the operator. if true, cyclic expansion is done. If false, it's ignored.. @@ -447,7 +457,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def average_across_bodies(self) -> Input: + def average_across_bodies(self) -> Input[bool]: r"""Allows to connect average_across_bodies input to the operator. for multibody simulations, the stresses are averaged across bodies if true or not if false (default). 
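# --- Hedged sketch: retrieving the typed Workflow output (not part of the patch)
# stress_eqv_as_mechanical_workflow now declares its single output as
# Output[Workflow], so the generated workflow object is recognised as a DPF
# Workflow by type checkers. "model.rst" is a placeholder path, and the
# introspection shown is only one way the returned workflow might be used.
from ansys.dpf import core as dpf

op = dpf.operators.result.stress_eqv_as_mechanical_workflow()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))
wf = op.outputs.workflow()  # typed as Workflow
print(wf.output_names)      # list the outputs exposed by the generated workflow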
@@ -482,13 +492,13 @@ class OutputsStressEqvAsMechanicalWorkflow(_Outputs): def __init__(self, op: Operator): super().__init__(stress_eqv_as_mechanical_workflow._spec().outputs, op) - self._workflow = Output( + self._workflow: Output[Workflow] = Output( stress_eqv_as_mechanical_workflow._spec().output_pin(0), 0, op ) self._outputs.append(self._workflow) @property - def workflow(self) -> Output: + def workflow(self) -> Output[Workflow]: r"""Allows to get workflow output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_intensity.py b/src/ansys/dpf/core/operators/result/stress_intensity.py index 34dabf423c2..de02ff540ac 100644 --- a/src/ansys/dpf/core/operators/result/stress_intensity.py +++ b/src/ansys/dpf/core/operators/result/stress_intensity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_intensity(Operator): r"""Reads/computes element nodal component stresses, average it on nodes (by @@ -302,35 +313,49 @@ class InputsStressIntensity(_Inputs): def __init__(self, op: Operator): super().__init__(stress_intensity._spec().inputs, op) - self._time_scoping = Input(stress_intensity._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_intensity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_intensity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_intensity._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_intensity._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + stress_intensity._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( stress_intensity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_intensity._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_intensity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( stress_intensity._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_intensity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_intensity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( stress_intensity._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = 
Input(stress_intensity._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_intensity._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_intensity._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_intensity._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -351,7 +376,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -372,7 +397,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -393,7 +418,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -414,7 +439,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -435,7 +460,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -456,7 +481,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -477,7 +502,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
Returns @@ -496,7 +521,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -517,7 +542,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -552,11 +577,13 @@ class OutputsStressIntensity(_Outputs): def __init__(self, op: Operator): super().__init__(stress_intensity._spec().outputs, op) - self._fields_container = Output(stress_intensity._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_intensity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_max_shear.py b/src/ansys/dpf/core/operators/result/stress_max_shear.py index 810f6332e35..b29f4117614 100644 --- a/src/ansys/dpf/core/operators/result/stress_max_shear.py +++ b/src/ansys/dpf/core/operators/result/stress_max_shear.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_max_shear(Operator): r"""Reads/computes element nodal component stresses, average it on nodes (by @@ -302,35 +313,49 @@ class InputsStressMaxShear(_Inputs): def __init__(self, op: Operator): super().__init__(stress_max_shear._spec().inputs, op) - self._time_scoping = Input(stress_max_shear._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_max_shear._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_max_shear._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_max_shear._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_max_shear._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + stress_max_shear._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( stress_max_shear._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_max_shear._spec().input_pin(4), 4, op, -1) + 
self._data_sources: Input[DataSources] = Input( + stress_max_shear._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( stress_max_shear._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_max_shear._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_max_shear._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( stress_max_shear._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_max_shear._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_max_shear._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_max_shear._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_max_shear._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -351,7 +376,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -372,7 +397,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -393,7 +418,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -414,7 +439,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
result file path container, used if no streams are set @@ -435,7 +460,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -456,7 +481,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -477,7 +502,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -496,7 +521,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -517,7 +542,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -552,11 +577,13 @@ class OutputsStressMaxShear(_Outputs): def __init__(self, op: Operator): super().__init__(stress_max_shear._spec().outputs, op) - self._fields_container = Output(stress_max_shear._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_max_shear._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_principal_1.py b/src/ansys/dpf/core/operators/result/stress_principal_1.py index 4b669549017..df7c61c6195 100644 --- a/src/ansys/dpf/core/operators/result/stress_principal_1.py +++ b/src/ansys/dpf/core/operators/result/stress_principal_1.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_principal_1(Operator): r"""Read/compute element nodal component stresses 1st principal component by @@ -304,37 +315,49 @@ class InputsStressPrincipal1(_Inputs): def __init__(self, op: Operator): super().__init__(stress_principal_1._spec().inputs, op) - self._time_scoping = Input(stress_principal_1._spec().input_pin(0), 0, op, -1) + 
self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_principal_1._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_principal_1._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_principal_1._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( stress_principal_1._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( stress_principal_1._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_principal_1._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_principal_1._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( stress_principal_1._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_principal_1._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_principal_1._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( stress_principal_1._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_principal_1._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_principal_1._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_principal_1._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_principal_1._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -355,7 +378,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. 
Using scopings container allows you to split the result fields container into domains @@ -376,7 +399,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -397,7 +420,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -418,7 +441,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -439,7 +462,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -460,7 +483,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -481,7 +504,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -500,7 +523,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -521,7 +544,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -556,11 +579,13 @@ class OutputsStressPrincipal1(_Outputs): def __init__(self, op: Operator): super().__init__(stress_principal_1._spec().outputs, op) - self._fields_container = Output(stress_principal_1._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_principal_1._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_principal_2.py b/src/ansys/dpf/core/operators/result/stress_principal_2.py index 72976003dc4..eb20bd9a18c 100644 --- a/src/ansys/dpf/core/operators/result/stress_principal_2.py +++ b/src/ansys/dpf/core/operators/result/stress_principal_2.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_principal_2(Operator): r"""Read/compute element nodal component stresses 2nd principal component by @@ -304,37 +315,49 @@ class InputsStressPrincipal2(_Inputs): def __init__(self, op: Operator): super().__init__(stress_principal_2._spec().inputs, op) - self._time_scoping = Input(stress_principal_2._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_principal_2._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_principal_2._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_principal_2._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( stress_principal_2._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( stress_principal_2._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_principal_2._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_principal_2._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( stress_principal_2._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_principal_2._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_principal_2._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] 
= Input( stress_principal_2._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_principal_2._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_principal_2._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_principal_2._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_principal_2._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -355,7 +378,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -376,7 +399,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -397,7 +420,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -418,7 +441,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -439,7 +462,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -460,7 +483,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -481,7 +504,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -500,7 +523,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -521,7 +544,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -556,11 +579,13 @@ class OutputsStressPrincipal2(_Outputs): def __init__(self, op: Operator): super().__init__(stress_principal_2._spec().outputs, op) - self._fields_container = Output(stress_principal_2._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_principal_2._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_principal_3.py b/src/ansys/dpf/core/operators/result/stress_principal_3.py index d51230b4e75..e1f51b62d5d 100644 --- a/src/ansys/dpf/core/operators/result/stress_principal_3.py +++ b/src/ansys/dpf/core/operators/result/stress_principal_3.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_principal_3(Operator): r"""Read/compute element nodal component stresses 3rd principal component by @@ -304,37 +315,49 @@ class InputsStressPrincipal3(_Inputs): def __init__(self, op: Operator): super().__init__(stress_principal_3._spec().inputs, op) - self._time_scoping = Input(stress_principal_3._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_principal_3._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_principal_3._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_principal_3._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( stress_principal_3._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - 
self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( stress_principal_3._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_principal_3._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_principal_3._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( stress_principal_3._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_principal_3._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_principal_3._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( stress_principal_3._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_principal_3._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_principal_3._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_principal_3._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_principal_3._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -355,7 +378,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -376,7 +399,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -397,7 +420,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -418,7 +441,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -439,7 +462,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -460,7 +483,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -481,7 +504,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -500,7 +523,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -521,7 +544,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -556,11 +579,13 @@ class OutputsStressPrincipal3(_Outputs): def __init__(self, op: Operator): super().__init__(stress_principal_3._spec().outputs, op) - self._fields_container = Output(stress_principal_3._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_principal_3._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_ratio.py b/src/ansys/dpf/core/operators/result/stress_ratio.py index f8373ae79df..65965fcc324 100644 --- a/src/ansys/dpf/core/operators/result/stress_ratio.py +++ b/src/ansys/dpf/core/operators/result/stress_ratio.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_ratio(Operator): r"""Read/compute element nodal stress ratio by calling the readers defined @@ -555,37 +566,57 @@ class InputsStressRatio(_Inputs): def __init__(self, op: Operator): super().__init__(stress_ratio._spec().inputs, op) - self._time_scoping = Input(stress_ratio._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_ratio._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_ratio._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_ratio._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_ratio._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + stress_ratio._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_ratio._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + stress_ratio._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_ratio._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_ratio._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( stress_ratio._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_ratio._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_ratio._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(stress_ratio._spec().input_pin(9), 9, op, -1) + 
self._requested_location: Input[str] = Input( + stress_ratio._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_beams = Input(stress_ratio._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_ratio._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(stress_ratio._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + stress_ratio._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(stress_ratio._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + stress_ratio._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( stress_ratio._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -606,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -627,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -648,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -669,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -690,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. 
if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -711,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -732,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -753,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -774,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -795,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -816,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -851,11 +882,13 @@ class OutputsStressRatio(_Outputs): def __init__(self, op: Operator): super().__init__(stress_ratio._spec().outputs, op) - self._fields_container = Output(stress_ratio._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_ratio._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_rotation_by_euler_nodes.py b/src/ansys/dpf/core/operators/result/stress_rotation_by_euler_nodes.py index 3cf964d3265..e9ed1d56652 100644 --- a/src/ansys/dpf/core/operators/result/stress_rotation_by_euler_nodes.py +++ b/src/ansys/dpf/core/operators/result/stress_rotation_by_euler_nodes.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_rotation_by_euler_nodes(Operator): r"""read Euler angles on elements from the result file and rotate the fields @@ -24,7 +30,7 @@ class stress_rotation_by_euler_nodes(Operator): ------ fields_container: FieldsContainer, optional streams_container: StreamsContainer or Stream or Class - Dataprocessing::Crstfilewrapper, optional + Dataprocessing::Crstfilewrapper, optional data_sources: DataSources Outputs @@ -186,21 +192,21 @@ class InputsStressRotationByEulerNodes(_Inputs): def __init__(self, op: Operator): super().__init__(stress_rotation_by_euler_nodes._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( stress_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( stress_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( stress_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -219,7 +225,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -238,7 +244,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
Returns @@ -271,13 +277,13 @@ class OutputsStressRotationByEulerNodes(_Outputs): def __init__(self, op: Operator): super().__init__(stress_rotation_by_euler_nodes._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( stress_rotation_by_euler_nodes._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/stress_von_mises.py b/src/ansys/dpf/core/operators/result/stress_von_mises.py index db99763fdc4..3f3eac5e596 100644 --- a/src/ansys/dpf/core/operators/result/stress_von_mises.py +++ b/src/ansys/dpf/core/operators/result/stress_von_mises.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class stress_von_mises(Operator): r"""Reads/computes element nodal component stresses, average it on nodes (by @@ -302,35 +313,49 @@ class InputsStressVonMises(_Inputs): def __init__(self, op: Operator): super().__init__(stress_von_mises._spec().inputs, op) - self._time_scoping = Input(stress_von_mises._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + stress_von_mises._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_von_mises._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + stress_von_mises._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_von_mises._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + stress_von_mises._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( stress_von_mises._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_von_mises._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + stress_von_mises._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( stress_von_mises._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_von_mises._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + stress_von_mises._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( stress_von_mises._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_von_mises._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + stress_von_mises._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_von_mises._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + stress_von_mises._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -351,7 +376,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -372,7 +397,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -393,7 +418,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -414,7 +439,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -435,7 +460,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -456,7 +481,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -477,7 +502,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -496,7 +521,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -517,7 +542,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -552,11 +577,13 @@ class OutputsStressVonMises(_Outputs): def __init__(self, op: Operator): super().__init__(stress_von_mises._spec().outputs, op) - self._fields_container = Output(stress_von_mises._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + stress_von_mises._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/structural_temperature.py b/src/ansys/dpf/core/operators/result/structural_temperature.py index cc42f4f0103..fe88372443e 100644 --- a/src/ansys/dpf/core/operators/result/structural_temperature.py +++ b/src/ansys/dpf/core/operators/result/structural_temperature.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class structural_temperature(Operator): r"""Read/compute element structural nodal temperatures by calling the @@ -555,55 +566,57 @@ class InputsStructuralTemperature(_Inputs): def __init__(self, op: Operator): super().__init__(structural_temperature._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( structural_temperature._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( structural_temperature._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( structural_temperature._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( 
structural_temperature._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( structural_temperature._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( structural_temperature._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(structural_temperature._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + structural_temperature._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( structural_temperature._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( structural_temperature._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( structural_temperature._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( structural_temperature._spec().input_pin(27), 27, op, -1 ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( structural_temperature._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -624,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -645,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -666,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -687,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -708,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -729,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -750,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -771,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -792,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -813,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -834,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
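# A sketch of the location and shell-control pins typed above on structural_temperature.
# The pin values are hypothetical choices; shell_layer uses the integer codes listed in the
# docstring (0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid).
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

ds = dpf.DataSources(r"path/to/model.rst")       # hypothetical result file
op = ops.result.structural_temperature()
op.inputs.data_sources.connect(ds)
op.inputs.requested_location.connect("Nodal")    # Input[str]
op.inputs.split_shells.connect(True)             # Input[bool]: adds an 'elshape' label to the output
op.inputs.shell_layer.connect(0)                 # Input[int]: 0 = Top
op.inputs.extend_to_mid_nodes.connect(True)      # Input[bool]
temps = op.outputs.fields_container()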
Default: True @@ -869,13 +882,13 @@ class OutputsStructuralTemperature(_Outputs): def __init__(self, op: Operator): super().__init__(structural_temperature._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( structural_temperature._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/superficial_velocity.py b/src/ansys/dpf/core/operators/result/superficial_velocity.py index 59cda388ce0..b8372377c14 100644 --- a/src/ansys/dpf/core/operators/result/superficial_velocity.py +++ b/src/ansys/dpf/core/operators/result/superficial_velocity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class superficial_velocity(Operator): r"""Read Superficial Velocity by calling the readers defined by the @@ -267,33 +278,41 @@ class InputsSuperficialVelocity(_Inputs): def __init__(self, op: Operator): super().__init__(superficial_velocity._spec().inputs, op) - self._time_scoping = Input(superficial_velocity._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + superficial_velocity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(superficial_velocity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + superficial_velocity._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( superficial_velocity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(superficial_velocity._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + superficial_velocity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(superficial_velocity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + superficial_velocity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( superficial_velocity._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input( + self._qualifiers1: Input[dict] = Input( superficial_velocity._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input( + self._qualifiers2: Input[dict] = Input( superficial_velocity._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._qualifiers2) @property - def 
time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -314,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -335,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -356,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -377,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -398,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -419,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -440,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
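# A sketch of the fluid-oriented pins typed above (region_scoping and the qualifiers
# label spaces). The result file, zone id and phase id are hypothetical values used only
# for illustration.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

ds = dpf.DataSources(r"path/to/fluid_case.cas.h5")   # hypothetical fluid result file
op = ops.result.superficial_velocity()
op.inputs.data_sources.connect(ds)
op.inputs.region_scoping.connect(3)                  # Input[Scoping | int]: a single zone id
op.inputs.qualifiers1.connect({"phase": 1})          # Input[dict]: label space of zone/phase/species ids
velocity = op.outputs.fields_container()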
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -475,13 +494,13 @@ class OutputsSuperficialVelocity(_Outputs): def __init__(self, op: Operator): super().__init__(superficial_velocity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( superficial_velocity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/surface_heat_rate.py b/src/ansys/dpf/core/operators/result/surface_heat_rate.py index 1f16bf49534..94456be5908 100644 --- a/src/ansys/dpf/core/operators/result/surface_heat_rate.py +++ b/src/ansys/dpf/core/operators/result/surface_heat_rate.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class surface_heat_rate(Operator): r"""Read Surface Heat Rate by calling the readers defined by the @@ -267,33 +278,41 @@ class InputsSurfaceHeatRate(_Inputs): def __init__(self, op: Operator): super().__init__(surface_heat_rate._spec().inputs, op) - self._time_scoping = Input(surface_heat_rate._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + surface_heat_rate._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(surface_heat_rate._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + surface_heat_rate._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( surface_heat_rate._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(surface_heat_rate._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + surface_heat_rate._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(surface_heat_rate._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + surface_heat_rate._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( surface_heat_rate._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input( + self._qualifiers1: Input[dict] = Input( surface_heat_rate._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input( + self._qualifiers2: Input[dict] = Input( surface_heat_rate._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._qualifiers2) @property - 
def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -314,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -335,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -356,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -377,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -398,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -419,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -440,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -475,11 +494,13 @@ class OutputsSurfaceHeatRate(_Outputs): def __init__(self, op: Operator): super().__init__(surface_heat_rate._spec().outputs, op) - self._fields_container = Output(surface_heat_rate._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + surface_heat_rate._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/swelling_strains.py b/src/ansys/dpf/core/operators/result/swelling_strains.py index d9b710bd607..00130999318 100644 --- a/src/ansys/dpf/core/operators/result/swelling_strains.py +++ b/src/ansys/dpf/core/operators/result/swelling_strains.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class swelling_strains(Operator): r"""Read/compute element nodal swelling strains by calling the readers @@ -555,41 +566,57 @@ class InputsSwellingStrains(_Inputs): def __init__(self, op: Operator): super().__init__(swelling_strains._spec().inputs, op) - self._time_scoping = Input(swelling_strains._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + swelling_strains._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(swelling_strains._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + swelling_strains._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(swelling_strains._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + swelling_strains._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( swelling_strains._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(swelling_strains._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + swelling_strains._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( swelling_strains._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(swelling_strains._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + swelling_strains._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: 
Input[str] = Input( swelling_strains._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_beams = Input(swelling_strains._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + swelling_strains._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(swelling_strains._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + swelling_strains._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(swelling_strains._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + swelling_strains._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( swelling_strains._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -610,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -631,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -652,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -673,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -694,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. 
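# A sketch of the mesh_scoping pin typed above. A Scoping restricts the output to the given
# entities; a ScopingsContainer additionally splits the result into one field per domain.
# The node ids below are hypothetical.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

ds = dpf.DataSources(r"path/to/model.rst")                           # hypothetical result file
node_scoping = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)
op = ops.result.swelling_strains()
op.inputs.data_sources.connect(ds)
op.inputs.mesh_scoping.connect(node_scoping)                         # Input[ScopingsContainer | Scoping]
strains = op.outputs.fields_container()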
if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -715,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -736,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -757,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -778,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -799,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -820,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -855,11 +882,13 @@ class OutputsSwellingStrains(_Outputs): def __init__(self, op: Operator): super().__init__(swelling_strains._spec().outputs, op) - self._fields_container = Output(swelling_strains._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + swelling_strains._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/tangential_contact_force.py b/src/ansys/dpf/core/operators/result/tangential_contact_force.py index d285ed18af8..5937cd0e6bb 100644 --- a/src/ansys/dpf/core/operators/result/tangential_contact_force.py +++ b/src/ansys/dpf/core/operators/result/tangential_contact_force.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class tangential_contact_force(Operator): r"""Read/compute tangential contact force by calling the readers defined by @@ -251,35 +262,37 @@ class InputsTangentialContactForce(_Inputs): def __init__(self, op: Operator): super().__init__(tangential_contact_force._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( tangential_contact_force._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( tangential_contact_force._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( tangential_contact_force._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( tangential_contact_force._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( tangential_contact_force._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( tangential_contact_force._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(tangential_contact_force._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + tangential_contact_force._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsTangentialContactForce(_Outputs): def __init__(self, op: Operator): super().__init__(tangential_contact_force._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( tangential_contact_force._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/tangential_contact_moment.py b/src/ansys/dpf/core/operators/result/tangential_contact_moment.py index 2b4d7030885..c3c2b45d882 100644 --- a/src/ansys/dpf/core/operators/result/tangential_contact_moment.py +++ b/src/ansys/dpf/core/operators/result/tangential_contact_moment.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class tangential_contact_moment(Operator): r"""Read/compute tangential contact moment by calling the readers defined by @@ -251,35 +262,37 @@ class InputsTangentialContactMoment(_Inputs): def __init__(self, op: Operator): super().__init__(tangential_contact_moment._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( tangential_contact_moment._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( tangential_contact_moment._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( tangential_contact_moment._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( tangential_contact_moment._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( tangential_contact_moment._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( tangential_contact_moment._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(tangential_contact_moment._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + tangential_contact_moment._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
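# A sketch showing a typed Output[FieldsContainer] (as on the contact force operator above)
# chained into another operator's Input[FieldsContainer]; math.norm_fc is just one possible
# downstream operator. The result file path is hypothetical.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

ds = dpf.DataSources(r"path/to/model.rst")          # hypothetical result file
force = ops.result.tangential_contact_force()
force.inputs.data_sources.connect(ds)
norm = ops.math.norm_fc()
norm.inputs.fields_container.connect(force.outputs.fields_container)  # output-to-input connection
force_norm = norm.outputs.fields_container()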
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsTangentialContactMoment(_Outputs): def __init__(self, op: Operator): super().__init__(tangential_contact_moment._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( tangential_contact_moment._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/temperature.py b/src/ansys/dpf/core/operators/result/temperature.py index dca724e855f..5354a33b19d 100644 --- a/src/ansys/dpf/core/operators/result/temperature.py +++ b/src/ansys/dpf/core/operators/result/temperature.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class temperature(Operator): r"""Read/compute temperature field by calling the readers defined by the @@ -363,39 +374,63 @@ class InputsTemperature(_Inputs): def __init__(self, op: Operator): super().__init__(temperature._spec().inputs, op) - self._time_scoping = Input(temperature._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + temperature._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(temperature._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + temperature._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(temperature._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + temperature._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(temperature._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + temperature._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(temperature._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + temperature._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(temperature._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + temperature._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(temperature._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + temperature._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(temperature._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + 
temperature._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input( + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( temperature._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input(temperature._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( + temperature._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(temperature._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input(temperature._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) - self._region_scoping = Input(temperature._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + temperature._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(temperature._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + temperature._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(temperature._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + temperature._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -416,7 +451,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -437,7 +472,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -458,7 +493,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
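# A sketch of the time_scoping variant described above, where time/freq values at specific
# load steps are passed as a Field scoped on "TimeFreq_steps". The step ids and time values
# are hypothetical.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

ds = dpf.DataSources(r"path/to/model.rst")                # hypothetical result file
steps_field = dpf.fields_factory.create_scalar_field(
    num_entities=2, location="TimeFreq_steps"
)
steps_field.scoping.ids = [1, 2]                          # load step ids
steps_field.data = [0.01, 0.02]                           # time/freq values at those steps
op = ops.result.temperature()
op.inputs.data_sources.connect(ds)
op.inputs.time_scoping.connect(steps_field)               # Input[Scoping | int | float | Field]
temps = op.outputs.fields_container()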
result file container allowed to be kept open to cache data @@ -479,7 +514,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -500,7 +535,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -521,7 +556,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -542,7 +577,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -563,7 +598,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -584,7 +619,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -605,7 +640,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -626,7 +661,7 @@ def phi(self) -> Input: return self._phi @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -647,7 +682,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -668,7 +703,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
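# A sketch of the cyclic-expansion pins typed above on the temperature operator. The sector
# ids and the angle are hypothetical; read_cyclic=2 requests a cyclic expansion as described
# in the docstring.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

ds = dpf.DataSources(r"path/to/cyclic_model.rst")                # hypothetical cyclic result file
op = ops.result.temperature()
op.inputs.data_sources.connect(ds)
op.inputs.read_cyclic.connect(2)                                 # Input[int]: 2 = cyclic expansion
op.inputs.sectors_to_expand.connect(dpf.Scoping(ids=[0, 1, 2]))  # Input[Scoping | ScopingsContainer]
op.inputs.phi.connect(15.0)                                      # Input[float]: angle in degrees
expanded = op.outputs.fields_container()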
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -703,11 +738,13 @@ class OutputsTemperature(_Outputs): def __init__(self, op: Operator): super().__init__(temperature._spec().outputs, op) - self._fields_container = Output(temperature._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + temperature._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/temperature_grad.py b/src/ansys/dpf/core/operators/result/temperature_grad.py index e7dbd7c30c0..94a4edc20c5 100644 --- a/src/ansys/dpf/core/operators/result/temperature_grad.py +++ b/src/ansys/dpf/core/operators/result/temperature_grad.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class temperature_grad(Operator): r"""Read/compute Temperature Gradient by calling the readers defined by the @@ -555,41 +566,57 @@ class InputsTemperatureGrad(_Inputs): def __init__(self, op: Operator): super().__init__(temperature_grad._spec().inputs, op) - self._time_scoping = Input(temperature_grad._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + temperature_grad._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(temperature_grad._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + temperature_grad._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(temperature_grad._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + temperature_grad._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( temperature_grad._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(temperature_grad._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + temperature_grad._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( temperature_grad._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(temperature_grad._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + temperature_grad._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( 
temperature_grad._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_beams = Input(temperature_grad._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + temperature_grad._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(temperature_grad._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + temperature_grad._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(temperature_grad._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + temperature_grad._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( temperature_grad._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -610,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -631,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -652,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -673,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -694,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -715,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -736,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -757,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -778,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -799,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -820,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -855,11 +882,13 @@ class OutputsTemperatureGrad(_Outputs): def __init__(self, op: Operator): super().__init__(temperature_grad._spec().outputs, op) - self._fields_container = Output(temperature_grad._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + temperature_grad._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/temperature_grad_X.py b/src/ansys/dpf/core/operators/result/temperature_grad_X.py index 726d5de715e..c8e497da9b8 100644 --- a/src/ansys/dpf/core/operators/result/temperature_grad_X.py +++ b/src/ansys/dpf/core/operators/result/temperature_grad_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class temperature_grad_X(Operator): r"""Read/compute Temperature Gradient X component of the vector (1st @@ -303,37 +314,49 @@ class InputsTemperatureGradX(_Inputs): def __init__(self, op: Operator): super().__init__(temperature_grad_X._spec().inputs, op) - self._time_scoping = Input(temperature_grad_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + temperature_grad_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(temperature_grad_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + temperature_grad_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( temperature_grad_X._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( temperature_grad_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(temperature_grad_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + temperature_grad_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( temperature_grad_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(temperature_grad_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + temperature_grad_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( temperature_grad_X._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_cyclic = Input(temperature_grad_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + temperature_grad_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(temperature_grad_X._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + temperature_grad_X._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsTemperatureGradX(_Outputs): def __init__(self, op: Operator): super().__init__(temperature_grad_X._spec().outputs, op) - self._fields_container = Output(temperature_grad_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + temperature_grad_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/temperature_grad_Y.py b/src/ansys/dpf/core/operators/result/temperature_grad_Y.py index b00d93ac807..0e3d62a863a 100644 --- a/src/ansys/dpf/core/operators/result/temperature_grad_Y.py +++ b/src/ansys/dpf/core/operators/result/temperature_grad_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class temperature_grad_Y(Operator): r"""Read/compute Temperature Gradient Y component of the vector (2nd @@ -303,37 +314,49 @@ class InputsTemperatureGradY(_Inputs): def __init__(self, op: Operator): super().__init__(temperature_grad_Y._spec().inputs, op) - self._time_scoping = Input(temperature_grad_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + temperature_grad_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(temperature_grad_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + temperature_grad_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( temperature_grad_Y._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - 
self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( temperature_grad_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(temperature_grad_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + temperature_grad_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( temperature_grad_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(temperature_grad_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + temperature_grad_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( temperature_grad_Y._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(temperature_grad_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + temperature_grad_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(temperature_grad_Y._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + temperature_grad_Y._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsTemperatureGradY(_Outputs): def __init__(self, op: Operator): super().__init__(temperature_grad_Y._spec().outputs, op) - self._fields_container = Output(temperature_grad_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + temperature_grad_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/temperature_grad_Z.py b/src/ansys/dpf/core/operators/result/temperature_grad_Z.py index 7b49299f685..672d8bcf1c0 100644 --- a/src/ansys/dpf/core/operators/result/temperature_grad_Z.py +++ b/src/ansys/dpf/core/operators/result/temperature_grad_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class temperature_grad_Z(Operator): r"""Read/compute Temperature Gradient Z component of the vector (3rd @@ -303,37 +314,49 @@ class InputsTemperatureGradZ(_Inputs): def __init__(self, op: Operator): super().__init__(temperature_grad_Z._spec().inputs, op) - self._time_scoping = Input(temperature_grad_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + temperature_grad_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(temperature_grad_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + temperature_grad_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( temperature_grad_Z._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( temperature_grad_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(temperature_grad_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + temperature_grad_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( temperature_grad_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(temperature_grad_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + temperature_grad_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( 
temperature_grad_Z._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(temperature_grad_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + temperature_grad_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(temperature_grad_Z._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + temperature_grad_Z._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsTemperatureGradZ(_Outputs): def __init__(self, op: Operator): super().__init__(temperature_grad_Z._spec().outputs, op) - self._fields_container = Output(temperature_grad_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + temperature_grad_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_conductivity.py b/src/ansys/dpf/core/operators/result/thermal_conductivity.py index 30306739074..ad16137c8f6 100644 --- a/src/ansys/dpf/core/operators/result/thermal_conductivity.py +++ b/src/ansys/dpf/core/operators/result/thermal_conductivity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_conductivity(Operator): r"""Read Thermal Conductivity by calling the readers defined by the @@ -267,33 +278,41 @@ class InputsThermalConductivity(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_conductivity._spec().inputs, op) - self._time_scoping = Input(thermal_conductivity._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + thermal_conductivity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thermal_conductivity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + thermal_conductivity._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_conductivity._spec().input_pin(3), 3, op, -1 ) 
self._inputs.append(self._streams_container) - self._data_sources = Input(thermal_conductivity._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + thermal_conductivity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(thermal_conductivity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_conductivity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( thermal_conductivity._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input( + self._qualifiers1: Input[dict] = Input( thermal_conductivity._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input( + self._qualifiers2: Input[dict] = Input( thermal_conductivity._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -314,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -335,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -356,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -377,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -398,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. 
region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -419,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -440,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -475,13 +494,13 @@ class OutputsThermalConductivity(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_conductivity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( thermal_conductivity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_dissipation_energy.py b/src/ansys/dpf/core/operators/result/thermal_dissipation_energy.py index d97d9f1da9c..e81d2f98e8e 100644 --- a/src/ansys/dpf/core/operators/result/thermal_dissipation_energy.py +++ b/src/ansys/dpf/core/operators/result/thermal_dissipation_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_dissipation_energy(Operator): r"""Read/compute thermal dissipation energy by calling the readers defined @@ -251,35 +262,37 @@ class InputsThermalDissipationEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_dissipation_energy._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( thermal_dissipation_energy._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( thermal_dissipation_energy._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( thermal_dissipation_energy._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_dissipation_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( 
thermal_dissipation_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_dissipation_energy._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_dissipation_energy._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_dissipation_energy._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -300,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -321,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -342,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -363,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -384,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. 
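The annotations introduced in these hunks are what let a static type checker reason about pin connections in user scripts. Below is a minimal usage sketch, not part of the patch: it assumes a running DPF server and uses a placeholder result-file path; only the operator and pin names are taken from the thermal_dissipation_energy hunks above.

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    # Instantiate the generated operator whose pins are annotated in this diff.
    op = ops.result.thermal_dissipation_energy()

    # data_sources is declared as Input[DataSources]; the path below is a placeholder.
    op.inputs.data_sources.connect(dpf.DataSources(r"path/to/model.rst"))

    # bool_rotate_to_global is declared as Input[bool].
    op.inputs.bool_rotate_to_global.connect(True)

    # fields_container is declared as Output[FieldsContainer]; reading it evaluates the operator.
    fields = op.outputs.fields_container()

The intent of typing the pins this way is that a checker such as mypy can flag an obviously ill-typed connection (for example, a string passed to an Input[bool]-typed pin) before the script ever talks to a server.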
@@ -405,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -440,13 +453,13 @@ class OutputsThermalDissipationEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_dissipation_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( thermal_dissipation_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_strain.py b/src/ansys/dpf/core/operators/result/thermal_strain.py index 58f6666f373..99549ec5bb7 100644 --- a/src/ansys/dpf/core/operators/result/thermal_strain.py +++ b/src/ansys/dpf/core/operators/result/thermal_strain.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_strain(Operator): r"""Read/compute element nodal component thermal strains by calling the @@ -555,37 +566,57 @@ class InputsThermalStrain(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_strain._spec().inputs, op) - self._time_scoping = Input(thermal_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + thermal_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thermal_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + thermal_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(thermal_strain._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + thermal_strain._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(thermal_strain._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + thermal_strain._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(thermal_strain._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + thermal_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_strain._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strain._spec().input_pin(7), 7, op, -1) + self._mesh: 
Input[MeshedRegion | MeshesContainer] = Input( + thermal_strain._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(thermal_strain._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + thermal_strain._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_beams = Input(thermal_strain._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + thermal_strain._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input(thermal_strain._spec().input_pin(26), 26, op, -1) + self._split_shells: Input[bool] = Input( + thermal_strain._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._split_shells) - self._shell_layer = Input(thermal_strain._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + thermal_strain._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( thermal_strain._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -606,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -627,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -648,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -669,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
result file path container, used if no streams are set @@ -690,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -711,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -732,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -753,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -774,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -795,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -816,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -851,11 +882,13 @@ class OutputsThermalStrain(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_strain._spec().outputs, op) - self._fields_container = Output(thermal_strain._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + thermal_strain._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_strain_X.py b/src/ansys/dpf/core/operators/result/thermal_strain_X.py index 1b0aa60562a..787eef6153b 100644 --- a/src/ansys/dpf/core/operators/result/thermal_strain_X.py +++ b/src/ansys/dpf/core/operators/result/thermal_strain_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_strain_X(Operator): r"""Read/compute element nodal component thermal strains XX normal component @@ -303,35 +314,49 @@ class InputsThermalStrainX(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_strain_X._spec().inputs, op) - self._time_scoping = Input(thermal_strain_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + thermal_strain_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thermal_strain_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + thermal_strain_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(thermal_strain_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + thermal_strain_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_strain_X._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(thermal_strain_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + thermal_strain_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_strain_X._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strain_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_strain_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( thermal_strain_X._spec().input_pin(9), 9, op, -1 ) 
self._inputs.append(self._requested_location) - self._read_cyclic = Input(thermal_strain_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + thermal_strain_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strain_X._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + thermal_strain_X._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsThermalStrainX(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_strain_X._spec().outputs, op) - self._fields_container = Output(thermal_strain_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + thermal_strain_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_strain_XY.py b/src/ansys/dpf/core/operators/result/thermal_strain_XY.py index 74eb7f84c69..91819c8c99a 100644 --- a/src/ansys/dpf/core/operators/result/thermal_strain_XY.py +++ b/src/ansys/dpf/core/operators/result/thermal_strain_XY.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_strain_XY(Operator): r"""Read/compute element nodal component thermal strains XY shear component @@ -303,37 +314,49 @@ class InputsThermalStrainXy(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_strain_XY._spec().inputs, op) - self._time_scoping = Input(thermal_strain_XY._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + thermal_strain_XY._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thermal_strain_XY._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + thermal_strain_XY._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( thermal_strain_XY._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - 
self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_strain_XY._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(thermal_strain_XY._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + thermal_strain_XY._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_strain_XY._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strain_XY._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_strain_XY._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( thermal_strain_XY._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(thermal_strain_XY._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + thermal_strain_XY._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strain_XY._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + thermal_strain_XY._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsThermalStrainXy(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_strain_XY._spec().outputs, op) - self._fields_container = Output(thermal_strain_XY._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + thermal_strain_XY._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_strain_XZ.py b/src/ansys/dpf/core/operators/result/thermal_strain_XZ.py index ebd5c73072b..6e6ef5d5f13 100644 --- a/src/ansys/dpf/core/operators/result/thermal_strain_XZ.py +++ b/src/ansys/dpf/core/operators/result/thermal_strain_XZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_strain_XZ(Operator): r"""Read/compute element nodal component thermal strains XZ shear component @@ -303,37 +314,49 @@ class InputsThermalStrainXz(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_strain_XZ._spec().inputs, op) - self._time_scoping = Input(thermal_strain_XZ._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + thermal_strain_XZ._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thermal_strain_XZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + thermal_strain_XZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( thermal_strain_XZ._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_strain_XZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(thermal_strain_XZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + thermal_strain_XZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_strain_XZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strain_XZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_strain_XZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( 
thermal_strain_XZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(thermal_strain_XZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + thermal_strain_XZ._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strain_XZ._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + thermal_strain_XZ._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsThermalStrainXz(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_strain_XZ._spec().outputs, op) - self._fields_container = Output(thermal_strain_XZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + thermal_strain_XZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_strain_Y.py b/src/ansys/dpf/core/operators/result/thermal_strain_Y.py index cebc3fccc19..34f5b1195ef 100644 --- a/src/ansys/dpf/core/operators/result/thermal_strain_Y.py +++ b/src/ansys/dpf/core/operators/result/thermal_strain_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_strain_Y(Operator): r"""Read/compute element nodal component thermal strains YY normal component @@ -303,35 +314,49 @@ class InputsThermalStrainY(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_strain_Y._spec().inputs, op) - self._time_scoping = Input(thermal_strain_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + thermal_strain_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thermal_strain_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + thermal_strain_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(thermal_strain_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + thermal_strain_Y._spec().input_pin(2), 2, op, -1 + ) 
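# Editorial sketch (not part of this patch): how the typed pins introduced above read
# in client code. Assumes ansys-dpf-core is installed; "model.rst" is a placeholder
# result-file path and the choice of thermal_strain_Y is purely illustrative.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.thermal_strain_Y()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # pin typed Input[DataSources]
op.inputs.requested_location.connect("Nodal")                 # pin typed Input[str]
op.inputs.read_cyclic.connect(1)                              # pin typed Input[int]
fields = op.outputs.fields_container()                        # pin typed Output[FieldsContainer]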
self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_strain_Y._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(thermal_strain_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + thermal_strain_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_strain_Y._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strain_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_strain_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( thermal_strain_Y._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(thermal_strain_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + thermal_strain_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strain_Y._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + thermal_strain_Y._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsThermalStrainY(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_strain_Y._spec().outputs, op) - self._fields_container = Output(thermal_strain_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + thermal_strain_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_strain_YZ.py b/src/ansys/dpf/core/operators/result/thermal_strain_YZ.py index d2f89857a37..1bb5a5e1980 100644 --- a/src/ansys/dpf/core/operators/result/thermal_strain_YZ.py +++ b/src/ansys/dpf/core/operators/result/thermal_strain_YZ.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_strain_YZ(Operator): r"""Read/compute element nodal component thermal strains YZ shear component @@ -303,37 +314,49 @@ class InputsThermalStrainYz(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_strain_YZ._spec().inputs, op) - self._time_scoping = Input(thermal_strain_YZ._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + thermal_strain_YZ._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thermal_strain_YZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + thermal_strain_YZ._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( thermal_strain_YZ._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_strain_YZ._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(thermal_strain_YZ._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + thermal_strain_YZ._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_strain_YZ._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strain_YZ._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_strain_YZ._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( 
thermal_strain_YZ._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(thermal_strain_YZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + thermal_strain_YZ._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strain_YZ._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + thermal_strain_YZ._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -354,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -375,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -396,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -417,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -438,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -459,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -480,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -501,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -522,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -557,11 +580,13 @@ class OutputsThermalStrainYz(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_strain_YZ._spec().outputs, op) - self._fields_container = Output(thermal_strain_YZ._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + thermal_strain_YZ._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_strain_Z.py b/src/ansys/dpf/core/operators/result/thermal_strain_Z.py index add3ddf2257..9db8761757b 100644 --- a/src/ansys/dpf/core/operators/result/thermal_strain_Z.py +++ b/src/ansys/dpf/core/operators/result/thermal_strain_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_strain_Z(Operator): r"""Read/compute element nodal component thermal strains ZZ normal component @@ -303,35 +314,49 @@ class InputsThermalStrainZ(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_strain_Z._spec().inputs, op) - self._time_scoping = Input(thermal_strain_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + thermal_strain_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thermal_strain_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + thermal_strain_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(thermal_strain_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + thermal_strain_Z._spec().input_pin(2), 2, op, -1 + ) 
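# Editorial sketch (not part of this patch): the Scoping | int | float | Field union on
# time_scoping documents the accepted connect styles; operator choice and values below
# are illustrative assumptions only.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.result.thermal_strain_Z()
op.inputs.time_scoping.connect(2)  # a single time/freq set id (int)
# or, equivalently, a Scoping carrying several set ids:
sets = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.time_freq)
op.inputs.time_scoping.connect(sets)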
self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_strain_Z._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(thermal_strain_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + thermal_strain_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_strain_Z._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strain_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_strain_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( thermal_strain_Z._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(thermal_strain_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + thermal_strain_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strain_Z._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + thermal_strain_Z._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -352,7 +377,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -373,7 +398,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -394,7 +419,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. 
result file container allowed to be kept open to cache data @@ -415,7 +440,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -436,7 +461,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -457,7 +482,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -478,7 +503,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location, default is Nodal @@ -499,7 +524,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -520,7 +545,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -555,11 +580,13 @@ class OutputsThermalStrainZ(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_strain_Z._spec().outputs, op) - self._fields_container = Output(thermal_strain_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + thermal_strain_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_strain_principal_1.py b/src/ansys/dpf/core/operators/result/thermal_strain_principal_1.py index 935f1406305..4f58ca81b99 100644 --- a/src/ansys/dpf/core/operators/result/thermal_strain_principal_1.py +++ b/src/ansys/dpf/core/operators/result/thermal_strain_principal_1.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_strain_principal_1(Operator): r"""Read/compute element nodal component thermal strains 1st principal @@ -308,47 +319,49 @@ class InputsThermalStrainPrincipal1(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_strain_principal_1._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( thermal_strain_principal_1._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( thermal_strain_principal_1._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( thermal_strain_principal_1._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_strain_principal_1._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( thermal_strain_principal_1._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_strain_principal_1._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strain_principal_1._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_strain_principal_1._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( thermal_strain_principal_1._spec().input_pin(9), 9, 
op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( thermal_strain_principal_1._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( thermal_strain_principal_1._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsThermalStrainPrincipal1(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_strain_principal_1._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( thermal_strain_principal_1._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_strain_principal_2.py b/src/ansys/dpf/core/operators/result/thermal_strain_principal_2.py index 894197aa50e..5ddc7dc3ef2 100644 --- a/src/ansys/dpf/core/operators/result/thermal_strain_principal_2.py +++ b/src/ansys/dpf/core/operators/result/thermal_strain_principal_2.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_strain_principal_2(Operator): r"""Read/compute element nodal component thermal strains 2nd principal @@ -308,47 +319,49 @@ class InputsThermalStrainPrincipal2(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_strain_principal_2._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( thermal_strain_principal_2._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( thermal_strain_principal_2._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( thermal_strain_principal_2._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_strain_principal_2._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( thermal_strain_principal_2._spec().input_pin(4), 4, op, -1 ) 
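# Editorial sketch (not part of this patch): with the output pin annotated as
# Output[FieldsContainer], chaining it into a downstream fields_container input stays
# self-documenting. The min_max_fc operator and the "model.rst" path are illustrative
# assumptions.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

strain_op = ops.result.thermal_strain_principal_1()
strain_op.inputs.data_sources.connect(dpf.DataSources("model.rst"))

min_max = ops.min_max.min_max_fc()
min_max.inputs.fields_container.connect(strain_op.outputs.fields_container)
max_field = min_max.outputs.field_max()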
self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_strain_principal_2._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strain_principal_2._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_strain_principal_2._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( thermal_strain_principal_2._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( thermal_strain_principal_2._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( thermal_strain_principal_2._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsThermalStrainPrincipal2(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_strain_principal_2._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( thermal_strain_principal_2._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_strain_principal_3.py b/src/ansys/dpf/core/operators/result/thermal_strain_principal_3.py index eea17aeaed1..a27ab464eaa 100644 --- a/src/ansys/dpf/core/operators/result/thermal_strain_principal_3.py +++ b/src/ansys/dpf/core/operators/result/thermal_strain_principal_3.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_strain_principal_3(Operator): r"""Read/compute element nodal component thermal strains 3rd principal @@ -308,47 +319,49 @@ class InputsThermalStrainPrincipal3(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_strain_principal_3._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: 
Input[Scoping | int | float | Field] = Input( thermal_strain_principal_3._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( thermal_strain_principal_3._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( thermal_strain_principal_3._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_strain_principal_3._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( thermal_strain_principal_3._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_strain_principal_3._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strain_principal_3._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_strain_principal_3._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( thermal_strain_principal_3._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_cyclic = Input( + self._read_cyclic: Input[int] = Input( thermal_strain_principal_3._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input( + self._read_beams: Input[bool] = Input( thermal_strain_principal_3._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -369,7 +382,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -390,7 +403,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
FieldsContainer already allocated modified inplace @@ -411,7 +424,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -432,7 +445,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -453,7 +466,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. This pin is removed for versions >25.2. An error is raised if connected. @@ -474,7 +487,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -495,7 +508,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Returns @@ -514,7 +527,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -535,7 +548,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. 
elemental nodal beam results are read if this pin is set to true (default is false) @@ -570,13 +583,13 @@ class OutputsThermalStrainPrincipal3(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_strain_principal_3._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( thermal_strain_principal_3._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thermal_strains_eqv.py b/src/ansys/dpf/core/operators/result/thermal_strains_eqv.py index 43209ba7dbe..834cccf209b 100644 --- a/src/ansys/dpf/core/operators/result/thermal_strains_eqv.py +++ b/src/ansys/dpf/core/operators/result/thermal_strains_eqv.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thermal_strains_eqv(Operator): r"""Read/compute element nodal equivalent component thermal strains by @@ -555,45 +566,57 @@ class InputsThermalStrainsEqv(_Inputs): def __init__(self, op: Operator): super().__init__(thermal_strains_eqv._spec().inputs, op) - self._time_scoping = Input(thermal_strains_eqv._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + thermal_strains_eqv._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thermal_strains_eqv._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + thermal_strains_eqv._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( thermal_strains_eqv._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( thermal_strains_eqv._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(thermal_strains_eqv._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + thermal_strains_eqv._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( thermal_strains_eqv._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strains_eqv._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thermal_strains_eqv._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( 
thermal_strains_eqv._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._read_beams = Input(thermal_strains_eqv._spec().input_pin(22), 22, op, -1) + self._read_beams: Input[bool] = Input( + thermal_strains_eqv._spec().input_pin(22), 22, op, -1 + ) self._inputs.append(self._read_beams) - self._split_shells = Input( + self._split_shells: Input[bool] = Input( thermal_strains_eqv._spec().input_pin(26), 26, op, -1 ) self._inputs.append(self._split_shells) - self._shell_layer = Input(thermal_strains_eqv._spec().input_pin(27), 27, op, -1) + self._shell_layer: Input[int] = Input( + thermal_strains_eqv._spec().input_pin(27), 27, op, -1 + ) self._inputs.append(self._shell_layer) - self._extend_to_mid_nodes = Input( + self._extend_to_mid_nodes: Input[bool] = Input( thermal_strains_eqv._spec().input_pin(28), 28, op, -1 ) self._inputs.append(self._extend_to_mid_nodes) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -614,7 +637,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -635,7 +658,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -656,7 +679,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -677,7 +700,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -698,7 +721,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). 
Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -719,7 +742,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -740,7 +763,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. requested location Nodal, Elemental or ElementalNodal @@ -761,7 +784,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def read_beams(self) -> Input: + def read_beams(self) -> Input[bool]: r"""Allows to connect read_beams input to the operator. elemental nodal beam results are read if this pin is set to true (default is false) @@ -782,7 +805,7 @@ def read_beams(self) -> Input: return self._read_beams @property - def split_shells(self) -> Input: + def split_shells(self) -> Input[bool]: r"""Allows to connect split_shells input to the operator. If true, this pin forces the results to be split by element shape, indicated by the presence of the 'elshape' label in the output. If false, the results for all elements shapes are combined. Default value is false if averaging is not required and true if averaging is required. @@ -803,7 +826,7 @@ def split_shells(self) -> Input: return self._split_shells @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. If connected, this pin allows you to extract the result only on the selected shell layer(s). The available values are: 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -824,7 +847,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def extend_to_mid_nodes(self) -> Input: + def extend_to_mid_nodes(self) -> Input[bool]: r"""Allows to connect extend_to_mid_nodes input to the operator. Compute mid nodes (when available) by averaging the neighbour corner nodes. 
Default: True @@ -859,13 +882,13 @@ class OutputsThermalStrainsEqv(_Outputs): def __init__(self, op: Operator): super().__init__(thermal_strains_eqv._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( thermal_strains_eqv._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/thickness.py b/src/ansys/dpf/core/operators/result/thickness.py index f4145ee653c..7caab413c9e 100644 --- a/src/ansys/dpf/core/operators/result/thickness.py +++ b/src/ansys/dpf/core/operators/result/thickness.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class thickness(Operator): r"""Read/compute thickness by calling the readers defined by the @@ -251,23 +262,37 @@ class InputsThickness(_Inputs): def __init__(self, op: Operator): super().__init__(thickness._spec().inputs, op) - self._time_scoping = Input(thickness._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + thickness._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thickness._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + thickness._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(thickness._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + thickness._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(thickness._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + thickness._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(thickness._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + thickness._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(thickness._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + thickness._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thickness._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + thickness._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -288,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -309,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -330,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -351,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -372,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -393,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -428,11 +453,13 @@ class OutputsThickness(_Outputs): def __init__(self, op: Operator): super().__init__(thickness._spec().outputs, op) - self._fields_container = Output(thickness._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + thickness._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/torque.py b/src/ansys/dpf/core/operators/result/torque.py index 20c06df667b..ce02bf53b53 100644 --- a/src/ansys/dpf/core/operators/result/torque.py +++ b/src/ansys/dpf/core/operators/result/torque.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class torque(Operator): r"""Compute torque of a force based on a 3D point. @@ -171,13 +176,17 @@ class InputsTorque(_Inputs): def __init__(self, op: Operator): super().__init__(torque._spec().inputs, op) - self._fields_container = Input(torque._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + torque._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._spoint = Input(torque._spec().input_pin(1), 1, op, -1) + self._spoint: Input[Field | FieldsContainer] = Input( + torque._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._spoint) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container containing the nodal forces. @@ -198,7 +207,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def spoint(self) -> Input: + def spoint(self) -> Input[Field | FieldsContainer]: r"""Allows to connect spoint input to the operator. Field or fields container containing the summation points for each associated field on pin 0. 
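Reviewer note on usage: to show how these typed pins read at a call site, here is a minimal sketch for the torque operator. The result-file path and the element_nodal_forces result are assumptions for illustration only; they are not part of this diff, and availability depends on the result file.

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

# Hypothetical MAPDL result file; any source providing nodal forces would do.
model = dpf.Model(r"/tmp/model.rst")

# Summation point: a 3D vector field with a single entity at the origin.
spoint = dpf.fields_factory.create_3d_vector_field(1)
spoint.append([0.0, 0.0, 0.0], 1)

torque_op = ops.result.torque()
# fields_container is now typed Input[FieldsContainer] and spoint is
# Input[Field | FieldsContainer], so a static checker flags, e.g., a Scoping here.
torque_op.inputs.fields_container.connect(model.results.element_nodal_forces().eval())
torque_op.inputs.spoint.connect(spoint)

# The output pin is typed Output[FieldsContainer].
moments = torque_op.outputs.fields_container()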
@@ -245,11 +254,13 @@ class OutputsTorque(_Outputs): def __init__(self, op: Operator): super().__init__(torque._spec().outputs, op) - self._fields_container = Output(torque._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + torque._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/total_contact_force.py b/src/ansys/dpf/core/operators/result/total_contact_force.py index 8b619f37817..726e56ec332 100644 --- a/src/ansys/dpf/core/operators/result/total_contact_force.py +++ b/src/ansys/dpf/core/operators/result/total_contact_force.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class total_contact_force(Operator): r"""Read/compute total contact force by calling the readers defined by the @@ -251,29 +262,37 @@ class InputsTotalContactForce(_Inputs): def __init__(self, op: Operator): super().__init__(total_contact_force._spec().inputs, op) - self._time_scoping = Input(total_contact_force._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + total_contact_force._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(total_contact_force._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + total_contact_force._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( total_contact_force._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( total_contact_force._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(total_contact_force._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + total_contact_force._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( total_contact_force._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(total_contact_force._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + total_contact_force._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. 
time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -294,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -315,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -336,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -357,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -378,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -399,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -434,13 +453,13 @@ class OutputsTotalContactForce(_Outputs): def __init__(self, op: Operator): super().__init__(total_contact_force._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( total_contact_force._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/total_contact_moment.py b/src/ansys/dpf/core/operators/result/total_contact_moment.py index 3d16d347841..bc908728763 100644 --- a/src/ansys/dpf/core/operators/result/total_contact_moment.py +++ b/src/ansys/dpf/core/operators/result/total_contact_moment.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class total_contact_moment(Operator): r"""Read/compute total contact moment by calling the readers defined by the @@ -251,29 +262,37 @@ class InputsTotalContactMoment(_Inputs): def __init__(self, op: Operator): super().__init__(total_contact_moment._spec().inputs, op) - self._time_scoping = Input(total_contact_moment._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + total_contact_moment._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(total_contact_moment._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + total_contact_moment._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( total_contact_moment._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( total_contact_moment._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(total_contact_moment._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + total_contact_moment._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( total_contact_moment._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(total_contact_moment._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + total_contact_moment._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows 
to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -294,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -315,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -336,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -357,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -378,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -399,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -434,13 +453,13 @@ class OutputsTotalContactMoment(_Outputs): def __init__(self, op: Operator): super().__init__(total_contact_moment._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( total_contact_moment._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/total_mass.py b/src/ansys/dpf/core/operators/result/total_mass.py index caeeddd83f3..d53e4979102 100644 --- a/src/ansys/dpf/core/operators/result/total_mass.py +++ b/src/ansys/dpf/core/operators/result/total_mass.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + class total_mass(Operator): r"""Reads total mass from mode file. @@ -142,11 +146,13 @@ class InputsTotalMass(_Inputs): def __init__(self, op: Operator): super().__init__(total_mass._spec().inputs, op) - self._data_sources = Input(total_mass._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + total_mass._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Data sources (must contain at least one mode file). @@ -181,11 +187,11 @@ class OutputsTotalMass(_Outputs): def __init__(self, op: Operator): super().__init__(total_mass._spec().outputs, op) - self._mass = Output(total_mass._spec().output_pin(0), 0, op) + self._mass: Output[float] = Output(total_mass._spec().output_pin(0), 0, op) self._outputs.append(self._mass) @property - def mass(self) -> Output: + def mass(self) -> Output[float]: r"""Allows to get mass output of the operator the unit should be grabbed from the rst file diff --git a/src/ansys/dpf/core/operators/result/total_pressure.py b/src/ansys/dpf/core/operators/result/total_pressure.py index 427bb2160f9..7b75a2f196d 100644 --- a/src/ansys/dpf/core/operators/result/total_pressure.py +++ b/src/ansys/dpf/core/operators/result/total_pressure.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class total_pressure(Operator): r"""Read Total Pressure by calling the readers defined by the datasources. 
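Reviewer note on the pattern: total_pressure applies the same guarded-import scheme as every other regenerated module, with concrete DPF types imported only under TYPE_CHECKING and `from __future__ import annotations` keeping the `Input[...]` annotations as strings at runtime. A minimal standalone sketch of that pattern follows; the helper function and pin names are illustrative only, not part of this change.

from __future__ import annotations  # annotations stay as strings at runtime

from typing import TYPE_CHECKING

from ansys.dpf.core.inputs import Input

if TYPE_CHECKING:
    # Seen only by static type checkers: no runtime import cost, no import cycles.
    from ansys.dpf.core.scoping import Scoping
    from ansys.dpf.core.streams_container import StreamsContainer


def describe_pins(
    time_scoping: Input[Scoping | int | float],
    streams_container: Input[StreamsContainer],
) -> None:
    """Hypothetical helper: the annotations above resolve for mypy/pyright only."""
    print(time_scoping.name, streams_container.name)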
@@ -265,25 +276,41 @@ class InputsTotalPressure(_Inputs): def __init__(self, op: Operator): super().__init__(total_pressure._spec().inputs, op) - self._time_scoping = Input(total_pressure._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + total_pressure._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(total_pressure._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + total_pressure._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(total_pressure._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + total_pressure._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(total_pressure._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + total_pressure._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(total_pressure._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + total_pressure._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(total_pressure._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + total_pressure._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(total_pressure._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + total_pressure._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(total_pressure._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + total_pressure._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. 
Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsTotalPressure(_Outputs): def __init__(self, op: Operator): super().__init__(total_pressure._spec().outputs, op) - self._fields_container = Output(total_pressure._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + total_pressure._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/total_strain.py b/src/ansys/dpf/core/operators/result/total_strain.py index 22813964e87..a3801f0415f 100644 --- a/src/ansys/dpf/core/operators/result/total_strain.py +++ b/src/ansys/dpf/core/operators/result/total_strain.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class total_strain(Operator): r"""Read/compute Total strain (LSDyna) by calling the readers defined by the @@ -251,25 +262,37 @@ class InputsTotalStrain(_Inputs): def __init__(self, op: Operator): super().__init__(total_strain._spec().inputs, op) - self._time_scoping = Input(total_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + total_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(total_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + total_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(total_strain._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + total_strain._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(total_strain._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + total_strain._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(total_strain._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + total_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input( + self._bool_rotate_to_global: Input[bool] = Input( total_strain._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(total_strain._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + total_strain._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | 
float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -290,7 +313,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -311,7 +334,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields container already allocated modified inplace @@ -332,7 +355,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -353,7 +376,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -374,7 +397,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -395,7 +418,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
prevents from reading the mesh in the result files @@ -430,11 +453,13 @@ class OutputsTotalStrain(_Outputs): def __init__(self, op: Operator): super().__init__(total_strain._spec().outputs, op) - self._fields_container = Output(total_strain._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + total_strain._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/total_temperature.py b/src/ansys/dpf/core/operators/result/total_temperature.py index 85586c60a45..468372e74dd 100644 --- a/src/ansys/dpf/core/operators/result/total_temperature.py +++ b/src/ansys/dpf/core/operators/result/total_temperature.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class total_temperature(Operator): r"""Read Total Temperature by calling the readers defined by the @@ -267,33 +278,41 @@ class InputsTotalTemperature(_Inputs): def __init__(self, op: Operator): super().__init__(total_temperature._spec().inputs, op) - self._time_scoping = Input(total_temperature._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + total_temperature._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(total_temperature._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + total_temperature._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( total_temperature._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(total_temperature._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + total_temperature._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(total_temperature._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + total_temperature._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( total_temperature._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input( + self._qualifiers1: Input[dict] = Input( total_temperature._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input( + self._qualifiers2: Input[dict] = Input( total_temperature._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._qualifiers2) @property - def 
time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -314,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -335,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -356,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -377,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -398,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -419,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -440,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -475,11 +494,13 @@ class OutputsTotalTemperature(_Outputs): def __init__(self, op: Operator): super().__init__(total_temperature._spec().outputs, op) - self._fields_container = Output(total_temperature._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + total_temperature._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/transform_invariant_terms_rbd.py b/src/ansys/dpf/core/operators/result/transform_invariant_terms_rbd.py index b22edaf4f76..c4720445405 100644 --- a/src/ansys/dpf/core/operators/result/transform_invariant_terms_rbd.py +++ b/src/ansys/dpf/core/operators/result/transform_invariant_terms_rbd.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.property_field import PropertyField + class transform_invariant_terms_rbd(Operator): r"""Transform invariant terms rbd based on a coordinate system (translation @@ -698,109 +703,109 @@ class InputsTransformInvariantTermsRbd(_Inputs): def __init__(self, op: Operator): super().__init__(transform_invariant_terms_rbd._spec().inputs, op) - self._rotation_matrix = Input( + self._rotation_matrix: Input[Field] = Input( transform_invariant_terms_rbd._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._rotation_matrix) - self._coordinate_system = Input( + self._coordinate_system: Input[Field] = Input( transform_invariant_terms_rbd._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._coordinate_system) - self._model_data = Input( + self._model_data: Input[PropertyField] = Input( transform_invariant_terms_rbd._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._model_data) - self._center_of_mass = Input( + self._center_of_mass: Input[Field] = Input( transform_invariant_terms_rbd._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._center_of_mass) - self._inertia_relief = Input( + self._inertia_relief: Input[Field] = Input( transform_invariant_terms_rbd._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._inertia_relief) - self._model_size = Input( + self._model_size: Input[float] = Input( transform_invariant_terms_rbd._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._model_size) - self._master_node_coordinates = Input( + self._master_node_coordinates: Input = Input( transform_invariant_terms_rbd._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._master_node_coordinates) - self._v_trsf = Input( + self._v_trsf: Input = Input( transform_invariant_terms_rbd._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._v_trsf) - self._k_mat = Input( + self._k_mat: Input[Field] = Input( transform_invariant_terms_rbd._spec().input_pin(8), 8, op, -1 ) self._inputs.append(self._k_mat) - self._mass_mat = Input( + self._mass_mat: Input[Field] = Input( transform_invariant_terms_rbd._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._mass_mat) - self._c_mat = Input( + self._c_mat: Input[Field] = Input( 
transform_invariant_terms_rbd._spec().input_pin(10), 10, op, -1 ) self._inputs.append(self._c_mat) - self._rhs = Input( + self._rhs: Input[Field] = Input( transform_invariant_terms_rbd._spec().input_pin(11), 11, op, -1 ) self._inputs.append(self._rhs) - self._dn = Input( + self._dn: Input = Input( transform_invariant_terms_rbd._spec().input_pin(12), 12, op, -1 ) self._inputs.append(self._dn) - self._dr_cross_n = Input( + self._dr_cross_n: Input = Input( transform_invariant_terms_rbd._spec().input_pin(13), 13, op, -1 ) self._inputs.append(self._dr_cross_n) - self._drn = Input( + self._drn: Input = Input( transform_invariant_terms_rbd._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._drn) - self._dn_cross_n = Input( + self._dn_cross_n: Input = Input( transform_invariant_terms_rbd._spec().input_pin(15), 15, op, -1 ) self._inputs.append(self._dn_cross_n) - self._dnx_y = Input( + self._dnx_y: Input = Input( transform_invariant_terms_rbd._spec().input_pin(16), 16, op, -1 ) self._inputs.append(self._dnx_y) - self._dny_y = Input( + self._dny_y: Input = Input( transform_invariant_terms_rbd._spec().input_pin(17), 17, op, -1 ) self._inputs.append(self._dny_y) - self._dnz_y = Input( + self._dnz_y: Input = Input( transform_invariant_terms_rbd._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._dnz_y) - self._dyx_n = Input( + self._dyx_n: Input = Input( transform_invariant_terms_rbd._spec().input_pin(19), 19, op, -1 ) self._inputs.append(self._dyx_n) - self._dyy_n = Input( + self._dyy_n: Input = Input( transform_invariant_terms_rbd._spec().input_pin(20), 20, op, -1 ) self._inputs.append(self._dyy_n) - self._dyz_n = Input( + self._dyz_n: Input = Input( transform_invariant_terms_rbd._spec().input_pin(21), 21, op, -1 ) self._inputs.append(self._dyz_n) - self._dnxn = Input( + self._dnxn: Input = Input( transform_invariant_terms_rbd._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._dnxn) - self._dnyn = Input( + self._dnyn: Input = Input( transform_invariant_terms_rbd._spec().input_pin(23), 23, op, -1 ) self._inputs.append(self._dnyn) - self._dnzn = Input( + self._dnzn: Input = Input( transform_invariant_terms_rbd._spec().input_pin(24), 24, op, -1 ) self._inputs.append(self._dnzn) @property - def rotation_matrix(self) -> Input: + def rotation_matrix(self) -> Input[Field]: r"""Allows to connect rotation_matrix input to the operator. 3-3 rotation matrix. @@ -821,7 +826,7 @@ def rotation_matrix(self) -> Input: return self._rotation_matrix @property - def coordinate_system(self) -> Input: + def coordinate_system(self) -> Input[Field]: r"""Allows to connect coordinate_system input to the operator. origin of the new coordinate system. @@ -842,7 +847,7 @@ def coordinate_system(self) -> Input: return self._coordinate_system @property - def model_data(self) -> Input: + def model_data(self) -> Input[PropertyField]: r"""Allows to connect model_data input to the operator. data describing the finite element model @@ -863,7 +868,7 @@ def model_data(self) -> Input: return self._model_data @property - def center_of_mass(self) -> Input: + def center_of_mass(self) -> Input[Field]: r"""Allows to connect center_of_mass input to the operator. center of mass of the body @@ -884,7 +889,7 @@ def center_of_mass(self) -> Input: return self._center_of_mass @property - def inertia_relief(self) -> Input: + def inertia_relief(self) -> Input[Field]: r"""Allows to connect inertia_relief input to the operator. 
inertia matrix @@ -905,7 +910,7 @@ def inertia_relief(self) -> Input: return self._inertia_relief @property - def model_size(self) -> Input: + def model_size(self) -> Input[float]: r"""Allows to connect model_size input to the operator. size of the diagonal box containing the body @@ -966,7 +971,7 @@ def v_trsf(self) -> Input: return self._v_trsf @property - def k_mat(self) -> Input: + def k_mat(self) -> Input[Field]: r"""Allows to connect k_mat input to the operator. Returns @@ -985,7 +990,7 @@ def k_mat(self) -> Input: return self._k_mat @property - def mass_mat(self) -> Input: + def mass_mat(self) -> Input[Field]: r"""Allows to connect mass_mat input to the operator. Returns @@ -1004,7 +1009,7 @@ def mass_mat(self) -> Input: return self._mass_mat @property - def c_mat(self) -> Input: + def c_mat(self) -> Input[Field]: r"""Allows to connect c_mat input to the operator. Returns @@ -1023,7 +1028,7 @@ def c_mat(self) -> Input: return self._c_mat @property - def rhs(self) -> Input: + def rhs(self) -> Input[Field]: r"""Allows to connect rhs input to the operator. Returns @@ -1325,91 +1330,101 @@ class OutputsTransformInvariantTermsRbd(_Outputs): def __init__(self, op: Operator): super().__init__(transform_invariant_terms_rbd._spec().outputs, op) - self._model_data = Output( + self._model_data: Output[PropertyField] = Output( transform_invariant_terms_rbd._spec().output_pin(0), 0, op ) self._outputs.append(self._model_data) - self._center_of_mass = Output( + self._center_of_mass: Output[Field] = Output( transform_invariant_terms_rbd._spec().output_pin(1), 1, op ) self._outputs.append(self._center_of_mass) - self._inertia_relief = Output( + self._inertia_relief: Output[Field] = Output( transform_invariant_terms_rbd._spec().output_pin(2), 2, op ) self._outputs.append(self._inertia_relief) - self._model_size = Output( + self._model_size: Output[PropertyField] = Output( transform_invariant_terms_rbd._spec().output_pin(3), 3, op ) self._outputs.append(self._model_size) - self._master_node_coordinates = Output( + self._master_node_coordinates: Output = Output( transform_invariant_terms_rbd._spec().output_pin(4), 4, op ) self._outputs.append(self._master_node_coordinates) - self._v_trsf = Output( + self._v_trsf: Output = Output( transform_invariant_terms_rbd._spec().output_pin(5), 5, op ) self._outputs.append(self._v_trsf) - self._k_mat = Output(transform_invariant_terms_rbd._spec().output_pin(6), 6, op) + self._k_mat: Output[Field] = Output( + transform_invariant_terms_rbd._spec().output_pin(6), 6, op + ) self._outputs.append(self._k_mat) - self._mass_mat = Output( + self._mass_mat: Output[Field] = Output( transform_invariant_terms_rbd._spec().output_pin(7), 7, op ) self._outputs.append(self._mass_mat) - self._c_mat = Output(transform_invariant_terms_rbd._spec().output_pin(8), 8, op) + self._c_mat: Output[Field] = Output( + transform_invariant_terms_rbd._spec().output_pin(8), 8, op + ) self._outputs.append(self._c_mat) - self._rhs = Output(transform_invariant_terms_rbd._spec().output_pin(9), 9, op) + self._rhs: Output[Field] = Output( + transform_invariant_terms_rbd._spec().output_pin(9), 9, op + ) self._outputs.append(self._rhs) - self._dn = Output(transform_invariant_terms_rbd._spec().output_pin(10), 10, op) + self._dn: Output = Output( + transform_invariant_terms_rbd._spec().output_pin(10), 10, op + ) self._outputs.append(self._dn) - self._dr_cross_n = Output( + self._dr_cross_n: Output = Output( transform_invariant_terms_rbd._spec().output_pin(11), 11, op ) self._outputs.append(self._dr_cross_n) 
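# Aside, not part of the generated module: a minimal usage sketch, assuming a running
# DPF server and that the operator's required input pins are connected beforehand.
# With the Output[...] annotations added above, an IDE or type checker can tell what
# each output pin evaluates to; pins such as dn or dr_cross_n are left as a bare
# Output in this patch.
from ansys.dpf import core as dpf

op = dpf.operators.result.transform_invariant_terms_rbd()
# ... connect rotation_matrix, coordinate_system, model_data, etc. here ...
model_data = op.outputs.model_data()  # pin 0, documented above as a PropertyField
stiffness = op.outputs.k_mat()        # pin 6, documented above as a Field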
- self._drn = Output(transform_invariant_terms_rbd._spec().output_pin(12), 12, op) + self._drn: Output = Output( + transform_invariant_terms_rbd._spec().output_pin(12), 12, op + ) self._outputs.append(self._drn) - self._dn_cross_n = Output( + self._dn_cross_n: Output = Output( transform_invariant_terms_rbd._spec().output_pin(13), 13, op ) self._outputs.append(self._dn_cross_n) - self._dnx_y = Output( + self._dnx_y: Output = Output( transform_invariant_terms_rbd._spec().output_pin(14), 14, op ) self._outputs.append(self._dnx_y) - self._dny_y = Output( + self._dny_y: Output = Output( transform_invariant_terms_rbd._spec().output_pin(15), 15, op ) self._outputs.append(self._dny_y) - self._dnz_y = Output( + self._dnz_y: Output = Output( transform_invariant_terms_rbd._spec().output_pin(16), 16, op ) self._outputs.append(self._dnz_y) - self._dyx_n = Output( + self._dyx_n: Output = Output( transform_invariant_terms_rbd._spec().output_pin(17), 17, op ) self._outputs.append(self._dyx_n) - self._dyy_n = Output( + self._dyy_n: Output = Output( transform_invariant_terms_rbd._spec().output_pin(18), 18, op ) self._outputs.append(self._dyy_n) - self._dyz_n = Output( + self._dyz_n: Output = Output( transform_invariant_terms_rbd._spec().output_pin(19), 19, op ) self._outputs.append(self._dyz_n) - self._dnxn = Output( + self._dnxn: Output = Output( transform_invariant_terms_rbd._spec().output_pin(20), 20, op ) self._outputs.append(self._dnxn) - self._dnyn = Output( + self._dnyn: Output = Output( transform_invariant_terms_rbd._spec().output_pin(21), 21, op ) self._outputs.append(self._dnyn) - self._dnzn = Output( + self._dnzn: Output = Output( transform_invariant_terms_rbd._spec().output_pin(22), 22, op ) self._outputs.append(self._dnzn) @property - def model_data(self) -> Output: + def model_data(self) -> Output[PropertyField]: r"""Allows to get model_data output of the operator data describing the finite element model @@ -1429,7 +1444,7 @@ def model_data(self) -> Output: return self._model_data @property - def center_of_mass(self) -> Output: + def center_of_mass(self) -> Output[Field]: r"""Allows to get center_of_mass output of the operator center of mass of the body @@ -1449,7 +1464,7 @@ def center_of_mass(self) -> Output: return self._center_of_mass @property - def inertia_relief(self) -> Output: + def inertia_relief(self) -> Output[Field]: r"""Allows to get inertia_relief output of the operator inertia matrix @@ -1469,7 +1484,7 @@ def inertia_relief(self) -> Output: return self._inertia_relief @property - def model_size(self) -> Output: + def model_size(self) -> Output[PropertyField]: r"""Allows to get model_size output of the operator Returns @@ -1525,7 +1540,7 @@ def v_trsf(self) -> Output: return self._v_trsf @property - def k_mat(self) -> Output: + def k_mat(self) -> Output[Field]: r"""Allows to get k_mat output of the operator Returns @@ -1543,7 +1558,7 @@ def k_mat(self) -> Output: return self._k_mat @property - def mass_mat(self) -> Output: + def mass_mat(self) -> Output[Field]: r"""Allows to get mass_mat output of the operator Returns @@ -1561,7 +1576,7 @@ def mass_mat(self) -> Output: return self._mass_mat @property - def c_mat(self) -> Output: + def c_mat(self) -> Output[Field]: r"""Allows to get c_mat output of the operator Returns @@ -1579,7 +1594,7 @@ def c_mat(self) -> Output: return self._c_mat @property - def rhs(self) -> Output: + def rhs(self) -> Output[Field]: r"""Allows to get rhs output of the operator Returns diff --git 
a/src/ansys/dpf/core/operators/result/transient_rayleigh_integration.py b/src/ansys/dpf/core/operators/result/transient_rayleigh_integration.py index 5c44a91c6f2..d8f4f400c7e 100644 --- a/src/ansys/dpf/core/operators/result/transient_rayleigh_integration.py +++ b/src/ansys/dpf/core/operators/result/transient_rayleigh_integration.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + class transient_rayleigh_integration(Operator): r"""Computes the transient Rayleigh integral @@ -246,37 +254,37 @@ class InputsTransientRayleighIntegration(_Inputs): def __init__(self, op: Operator): super().__init__(transient_rayleigh_integration._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( transient_rayleigh_integration._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._mesh = Input( + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( transient_rayleigh_integration._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh) - self._time_scoping = Input( + self._time_scoping: Input[int | Scoping] = Input( transient_rayleigh_integration._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._time_scoping) - self._field = Input( + self._field: Input[Field] = Input( transient_rayleigh_integration._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._field) - self._observation_mesh = Input( + self._observation_mesh: Input[MeshedRegion] = Input( transient_rayleigh_integration._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._observation_mesh) - self._mass_density = Input( + self._mass_density: Input[float] = Input( transient_rayleigh_integration._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._mass_density) - self._speed_of_sound = Input( + self._speed_of_sound: Input[float] = Input( transient_rayleigh_integration._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._speed_of_sound) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. the input field container expects acceleration fields @@ -297,7 +305,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. The meshes region in this pin has to be boundary or skin mesh. This is the source meshes. @@ -318,7 +326,7 @@ def mesh(self) -> Input: return self._mesh @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[int | Scoping]: r"""Allows to connect time_scoping input to the operator. Load step number (if it's specified, the Transient rayleigh integration is computed only on the substeps of this step) or time scoping @@ -339,7 +347,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. 
The field represents the coordinates of the observation position. It should be specified if not observation mesh is provided. @@ -360,7 +368,7 @@ def field(self) -> Input: return self._field @property - def observation_mesh(self) -> Input: + def observation_mesh(self) -> Input[MeshedRegion]: r"""Allows to connect observation_mesh input to the operator. This is the observation mesh region @@ -381,7 +389,7 @@ def observation_mesh(self) -> Input: return self._observation_mesh @property - def mass_density(self) -> Input: + def mass_density(self) -> Input[float]: r"""Allows to connect mass_density input to the operator. Mass density (if it's not specified, default value of the air is applied). @@ -402,7 +410,7 @@ def mass_density(self) -> Input: return self._mass_density @property - def speed_of_sound(self) -> Input: + def speed_of_sound(self) -> Input[float]: r"""Allows to connect speed_of_sound input to the operator. Speed of sound (if it's not specified, default value of the speed of sound in the air is applied). @@ -437,13 +445,13 @@ class OutputsTransientRayleighIntegration(_Outputs): def __init__(self, op: Operator): super().__init__(transient_rayleigh_integration._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( transient_rayleigh_integration._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/turbulent_kinetic_energy.py b/src/ansys/dpf/core/operators/result/turbulent_kinetic_energy.py index e454180918a..f61423be318 100644 --- a/src/ansys/dpf/core/operators/result/turbulent_kinetic_energy.py +++ b/src/ansys/dpf/core/operators/result/turbulent_kinetic_energy.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class turbulent_kinetic_energy(Operator): r"""Read Turbulent Kinetic Energy (k) by calling the readers defined by the @@ -267,39 +278,41 @@ class InputsTurbulentKineticEnergy(_Inputs): def __init__(self, op: Operator): super().__init__(turbulent_kinetic_energy._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping | int | float | Field] = Input( turbulent_kinetic_energy._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( turbulent_kinetic_energy._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( turbulent_kinetic_energy._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + 
self._data_sources: Input[DataSources] = Input( turbulent_kinetic_energy._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._mesh = Input(turbulent_kinetic_energy._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + turbulent_kinetic_energy._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( turbulent_kinetic_energy._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input( + self._qualifiers1: Input[dict] = Input( turbulent_kinetic_energy._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input( + self._qualifiers2: Input[dict] = Input( turbulent_kinetic_energy._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -320,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -341,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -362,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -383,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -404,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. 
region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -425,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -446,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -481,13 +494,13 @@ class OutputsTurbulentKineticEnergy(_Outputs): def __init__(self, op: Operator): super().__init__(turbulent_kinetic_energy._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( turbulent_kinetic_energy._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/turbulent_viscosity.py b/src/ansys/dpf/core/operators/result/turbulent_viscosity.py index 225e9179e98..961ba1c505d 100644 --- a/src/ansys/dpf/core/operators/result/turbulent_viscosity.py +++ b/src/ansys/dpf/core/operators/result/turbulent_viscosity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class turbulent_viscosity(Operator): r"""Read Turbulent Viscosity by calling the readers defined by the @@ -267,33 +278,41 @@ class InputsTurbulentViscosity(_Inputs): def __init__(self, op: Operator): super().__init__(turbulent_viscosity._spec().inputs, op) - self._time_scoping = Input(turbulent_viscosity._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + turbulent_viscosity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(turbulent_viscosity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + turbulent_viscosity._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( turbulent_viscosity._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(turbulent_viscosity._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + turbulent_viscosity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = 
Input(turbulent_viscosity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + turbulent_viscosity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( turbulent_viscosity._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input( + self._qualifiers1: Input[dict] = Input( turbulent_viscosity._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input( + self._qualifiers2: Input[dict] = Input( turbulent_viscosity._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -314,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -335,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -356,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -377,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -398,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). 
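# Aside, not part of the generated module: a hypothetical sketch of what the typed
# input pins above buy the user (the result file name is made up for illustration).
from ansys.dpf import core as dpf

op = dpf.operators.result.turbulent_viscosity()
op.inputs.data_sources.connect(dpf.DataSources("fluid_results.cas.h5"))  # Input[DataSources]
op.inputs.time_scoping.connect(1)            # OK: int set id, matches Input[Scoping | int | float | Field]
op.inputs.region_scoping.connect(13)         # OK: int zone id, matches Input[Scoping | int]
op.inputs.qualifiers1.connect({"zone": 13})  # OK per the Input[dict] annotation
# op.inputs.region_scoping.connect("zone-13") would now be flagged by mypy or pyright,
# since str is not part of the annotated union.
fields = op.outputs.fields_container()       # typed as FieldsContainer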
@@ -419,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -440,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -475,13 +494,13 @@ class OutputsTurbulentViscosity(_Outputs): def __init__(self, op: Operator): super().__init__(turbulent_viscosity._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( turbulent_viscosity._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/velocity.py b/src/ansys/dpf/core/operators/result/velocity.py index b8790ff9b81..6dea6938b58 100644 --- a/src/ansys/dpf/core/operators/result/velocity.py +++ b/src/ansys/dpf/core/operators/result/velocity.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class velocity(Operator): r"""Read/compute nodal velocities by calling the readers defined by the @@ -363,37 +374,63 @@ class InputsVelocity(_Inputs): def __init__(self, op: Operator): super().__init__(velocity._spec().inputs, op) - self._time_scoping = Input(velocity._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + velocity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(velocity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + velocity._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(velocity._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + velocity._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(velocity._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + velocity._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(velocity._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + velocity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(velocity._spec().input_pin(5), 5, op, 
-1) + self._bool_rotate_to_global: Input[bool] = Input( + velocity._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(velocity._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + velocity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(velocity._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + velocity._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(velocity._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region: Input[MeshedRegion | MeshesContainer] = Input( + velocity._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input(velocity._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand: Input[Scoping | ScopingsContainer] = Input( + velocity._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(velocity._spec().input_pin(19), 19, op, -1) + self._phi: Input[float] = Input(velocity._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) - self._region_scoping = Input(velocity._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + velocity._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(velocity._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + velocity._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(velocity._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + velocity._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -414,7 +451,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -435,7 +472,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
Fields container already allocated modified inplace @@ -456,7 +493,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -477,7 +514,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -498,7 +535,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true). Please check your results carefully if 'false' is used for Elemental or ElementalNodal results averaged to the Nodes when adjacent elements do not share the same coordinate system, as results may be incorrect. @@ -519,7 +556,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. mesh. If cylic expansion is to be done, mesh of the base sector @@ -540,7 +577,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -561,7 +598,7 @@ def read_cyclic(self) -> Input: return self._read_cyclic @property - def expanded_meshed_region(self) -> Input: + def expanded_meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect expanded_meshed_region input to the operator. mesh expanded, use if cyclic expansion is to be done. @@ -582,7 +619,7 @@ def expanded_meshed_region(self) -> Input: return self._expanded_meshed_region @property - def sectors_to_expand(self) -> Input: + def sectors_to_expand(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect sectors_to_expand input to the operator. sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done. @@ -603,7 +640,7 @@ def sectors_to_expand(self) -> Input: return self._sectors_to_expand @property - def phi(self) -> Input: + def phi(self) -> Input[float]: r"""Allows to connect phi input to the operator. angle phi in degrees (default value 0.0), use if cyclic expansion is to be done. @@ -624,7 +661,7 @@ def phi(self) -> Input: return self._phi @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -645,7 +682,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -666,7 +703,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -701,11 +738,13 @@ class OutputsVelocity(_Outputs): def __init__(self, op: Operator): super().__init__(velocity._spec().outputs, op) - self._fields_container = Output(velocity._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + velocity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/velocity_X.py b/src/ansys/dpf/core/operators/result/velocity_X.py index 0fd6d1b8493..2baaea89b7f 100644 --- a/src/ansys/dpf/core/operators/result/velocity_X.py +++ b/src/ansys/dpf/core/operators/result/velocity_X.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class velocity_X(Operator): r"""Read/compute nodal velocities X component of the vector (1st component) @@ -267,25 +278,41 @@ class InputsVelocityX(_Inputs): def __init__(self, op: Operator): super().__init__(velocity_X._spec().inputs, op) - self._time_scoping = Input(velocity_X._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + velocity_X._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(velocity_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + velocity_X._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(velocity_X._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + velocity_X._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(velocity_X._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + velocity_X._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(velocity_X._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + velocity_X._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(velocity_X._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + velocity_X._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = 
Input(velocity_X._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + velocity_X._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = Input(velocity_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + velocity_X._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -306,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -327,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -348,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -369,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -390,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -411,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -432,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -467,11 +494,13 @@ class OutputsVelocityX(_Outputs): def __init__(self, op: Operator): super().__init__(velocity_X._spec().outputs, op) - self._fields_container = Output(velocity_X._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + velocity_X._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/velocity_Y.py b/src/ansys/dpf/core/operators/result/velocity_Y.py index e157e4134c4..b1529c4a454 100644 --- a/src/ansys/dpf/core/operators/result/velocity_Y.py +++ b/src/ansys/dpf/core/operators/result/velocity_Y.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class velocity_Y(Operator): r"""Read/compute nodal velocities Y component of the vector (2nd component) @@ -267,25 +278,41 @@ class InputsVelocityY(_Inputs): def __init__(self, op: Operator): super().__init__(velocity_Y._spec().inputs, op) - self._time_scoping = Input(velocity_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + velocity_Y._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(velocity_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + velocity_Y._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(velocity_Y._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + velocity_Y._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(velocity_Y._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + velocity_Y._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(velocity_Y._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + velocity_Y._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(velocity_Y._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + velocity_Y._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(velocity_Y._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + velocity_Y._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = 
Input(velocity_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + velocity_Y._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -306,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -327,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -348,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -369,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -390,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -411,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -432,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -467,11 +494,13 @@ class OutputsVelocityY(_Outputs): def __init__(self, op: Operator): super().__init__(velocity_Y._spec().outputs, op) - self._fields_container = Output(velocity_Y._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + velocity_Y._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/velocity_Z.py b/src/ansys/dpf/core/operators/result/velocity_Z.py index 96c126ad8c7..0f7ad6b5a62 100644 --- a/src/ansys/dpf/core/operators/result/velocity_Z.py +++ b/src/ansys/dpf/core/operators/result/velocity_Z.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class velocity_Z(Operator): r"""Read/compute nodal velocities Z component of the vector (3rd component) @@ -267,25 +278,41 @@ class InputsVelocityZ(_Inputs): def __init__(self, op: Operator): super().__init__(velocity_Z._spec().inputs, op) - self._time_scoping = Input(velocity_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + velocity_Z._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(velocity_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + velocity_Z._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(velocity_Z._spec().input_pin(2), 2, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + velocity_Z._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(velocity_Z._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + velocity_Z._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(velocity_Z._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + velocity_Z._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(velocity_Z._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global: Input[bool] = Input( + velocity_Z._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(velocity_Z._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + velocity_Z._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._read_cyclic = 
Input(velocity_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic: Input[int] = Input( + velocity_Z._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -306,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -327,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. FieldsContainer already allocated modified inplace @@ -348,7 +375,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -369,7 +396,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -390,7 +417,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def bool_rotate_to_global(self) -> Input: + def bool_rotate_to_global(self) -> Input[bool]: r"""Allows to connect bool_rotate_to_global input to the operator. if true the field is rotated to global coordinate system (default true) @@ -411,7 +438,7 @@ def bool_rotate_to_global(self) -> Input: return self._bool_rotate_to_global @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -432,7 +459,7 @@ def mesh(self) -> Input: return self._mesh @property - def read_cyclic(self) -> Input: + def read_cyclic(self) -> Input[int]: r"""Allows to connect read_cyclic input to the operator. 
if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) @@ -467,11 +494,13 @@ class OutputsVelocityZ(_Outputs): def __init__(self, op: Operator): super().__init__(velocity_Z._spec().outputs, op) - self._fields_container = Output(velocity_Z._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + velocity_Z._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/volume_fraction.py b/src/ansys/dpf/core/operators/result/volume_fraction.py index 650ea97766e..bba7a1f792c 100644 --- a/src/ansys/dpf/core/operators/result/volume_fraction.py +++ b/src/ansys/dpf/core/operators/result/volume_fraction.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class volume_fraction(Operator): r"""Read Volume Fraction by calling the readers defined by the datasources. 
@@ -265,25 +276,41 @@ class InputsVolumeFraction(_Inputs): def __init__(self, op: Operator): super().__init__(volume_fraction._spec().inputs, op) - self._time_scoping = Input(volume_fraction._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + volume_fraction._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(volume_fraction._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + volume_fraction._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(volume_fraction._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + volume_fraction._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(volume_fraction._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + volume_fraction._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(volume_fraction._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + volume_fraction._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(volume_fraction._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + volume_fraction._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(volume_fraction._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + volume_fraction._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(volume_fraction._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + volume_fraction._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. 
Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsVolumeFraction(_Outputs): def __init__(self, op: Operator): super().__init__(volume_fraction._spec().outputs, op) - self._fields_container = Output(volume_fraction._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + volume_fraction._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/wall_shear_stress.py b/src/ansys/dpf/core/operators/result/wall_shear_stress.py index 2799530c25b..d58cde9a163 100644 --- a/src/ansys/dpf/core/operators/result/wall_shear_stress.py +++ b/src/ansys/dpf/core/operators/result/wall_shear_stress.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class wall_shear_stress(Operator): r"""Read Wall Shear Stress by calling the readers defined by the @@ -267,33 +278,41 @@ class InputsWallShearStress(_Inputs): def __init__(self, op: Operator): super().__init__(wall_shear_stress._spec().inputs, op) - self._time_scoping = Input(wall_shear_stress._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + wall_shear_stress._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(wall_shear_stress._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + wall_shear_stress._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( wall_shear_stress._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(wall_shear_stress._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + wall_shear_stress._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(wall_shear_stress._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + wall_shear_stress._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input( + self._region_scoping: Input[Scoping | int] = Input( wall_shear_stress._spec().input_pin(25), 25, op, -1 ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input( + self._qualifiers1: Input[dict] = Input( wall_shear_stress._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input( + self._qualifiers2: Input[dict] = Input( wall_shear_stress._spec().input_pin(1001), 1001, op, 1 ) 
self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -314,7 +333,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -335,7 +354,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -356,7 +375,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -377,7 +396,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -398,7 +417,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -419,7 +438,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -440,7 +459,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -475,11 +494,13 @@ class OutputsWallShearStress(_Outputs): def __init__(self, op: Operator): super().__init__(wall_shear_stress._spec().outputs, op) - self._fields_container = Output(wall_shear_stress._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + wall_shear_stress._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/workflow_energy_per_component.py b/src/ansys/dpf/core/operators/result/workflow_energy_per_component.py index 92271cab2bb..ec7746e7004 100644 --- a/src/ansys/dpf/core/operators/result/workflow_energy_per_component.py +++ b/src/ansys/dpf/core/operators/result/workflow_energy_per_component.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class workflow_energy_per_component(Operator): r"""Calculates the cumulated energy per component (Named Selection). For @@ -301,37 +309,37 @@ class InputsWorkflowEnergyPerComponent(_Inputs): def __init__(self, op: Operator): super().__init__(workflow_energy_per_component._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping] = Input( workflow_energy_per_component._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping | ScopingsContainer] = Input( workflow_energy_per_component._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._energy_type = Input( + self._energy_type: Input[int] = Input( workflow_energy_per_component._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._energy_type) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( workflow_energy_per_component._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( workflow_energy_per_component._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._named_selection1 = Input( + self._named_selection1: Input[str] = Input( workflow_energy_per_component._spec().input_pin(5), 5, op, 0 ) self._inputs.append(self._named_selection1) - self._named_selection2 = Input( + self._named_selection2: Input[str] = Input( workflow_energy_per_component._spec().input_pin(6), 6, op, 1 ) self._inputs.append(self._named_selection2) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. 
Returns @@ -350,7 +358,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect mesh_scoping input to the operator. When the input is a scoping, it is treated as the master scoping. All named selections will intersect with it. When the input is a scopings container, named selections will not be needed. @@ -371,7 +379,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def energy_type(self) -> Input: + def energy_type(self) -> Input[int]: r"""Allows to connect energy_type input to the operator. Type of energy to be processed: (0: Strain + Kinetic energy (default), 1: Strain energy, 2: Kinetic energy, 3: All energy types) @@ -392,7 +400,7 @@ def energy_type(self) -> Input: return self._energy_type @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -411,7 +419,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -430,7 +438,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def named_selection1(self) -> Input: + def named_selection1(self) -> Input[str]: r"""Allows to connect named_selection1 input to the operator. Named Selections. Intersection of all Named Selections with the master scoping will be done. @@ -451,7 +459,7 @@ def named_selection1(self) -> Input: return self._named_selection1 @property - def named_selection2(self) -> Input: + def named_selection2(self) -> Input[str]: r"""Allows to connect named_selection2 input to the operator. Named Selections. Intersection of all Named Selections with the master scoping will be done. 
@@ -493,41 +501,41 @@ class OutputsWorkflowEnergyPerComponent(_Outputs): def __init__(self, op: Operator): super().__init__(workflow_energy_per_component._spec().outputs, op) - self._component_energy = Output( + self._component_energy: Output[FieldsContainer] = Output( workflow_energy_per_component._spec().output_pin(0), 0, op ) self._outputs.append(self._component_energy) - self._component_energy_percentage = Output( + self._component_energy_percentage: Output[FieldsContainer] = Output( workflow_energy_per_component._spec().output_pin(1), 1, op ) self._outputs.append(self._component_energy_percentage) - self._component_total_energy = Output( + self._component_total_energy: Output[FieldsContainer] = Output( workflow_energy_per_component._spec().output_pin(2), 2, op ) self._outputs.append(self._component_total_energy) - self._component_total_energy_percentage = Output( + self._component_total_energy_percentage: Output[FieldsContainer] = Output( workflow_energy_per_component._spec().output_pin(3), 3, op ) self._outputs.append(self._component_total_energy_percentage) - self._component_strain_energy = Output( + self._component_strain_energy: Output[FieldsContainer] = Output( workflow_energy_per_component._spec().output_pin(4), 4, op ) self._outputs.append(self._component_strain_energy) - self._component_strain_energy_percentage = Output( + self._component_strain_energy_percentage: Output[FieldsContainer] = Output( workflow_energy_per_component._spec().output_pin(5), 5, op ) self._outputs.append(self._component_strain_energy_percentage) - self._component_kinetic_energy = Output( + self._component_kinetic_energy: Output[FieldsContainer] = Output( workflow_energy_per_component._spec().output_pin(6), 6, op ) self._outputs.append(self._component_kinetic_energy) - self._component_kinetic_energy_percentage = Output( + self._component_kinetic_energy_percentage: Output[FieldsContainer] = Output( workflow_energy_per_component._spec().output_pin(7), 7, op ) self._outputs.append(self._component_kinetic_energy_percentage) @property - def component_energy(self) -> Output: + def component_energy(self) -> Output[FieldsContainer]: r"""Allows to get component_energy output of the operator Returns @@ -545,7 +553,7 @@ def component_energy(self) -> Output: return self._component_energy @property - def component_energy_percentage(self) -> Output: + def component_energy_percentage(self) -> Output[FieldsContainer]: r"""Allows to get component_energy_percentage output of the operator Returns @@ -563,7 +571,7 @@ def component_energy_percentage(self) -> Output: return self._component_energy_percentage @property - def component_total_energy(self) -> Output: + def component_total_energy(self) -> Output[FieldsContainer]: r"""Allows to get component_total_energy output of the operator Returns @@ -581,7 +589,7 @@ def component_total_energy(self) -> Output: return self._component_total_energy @property - def component_total_energy_percentage(self) -> Output: + def component_total_energy_percentage(self) -> Output[FieldsContainer]: r"""Allows to get component_total_energy_percentage output of the operator Returns @@ -599,7 +607,7 @@ def component_total_energy_percentage(self) -> Output: return self._component_total_energy_percentage @property - def component_strain_energy(self) -> Output: + def component_strain_energy(self) -> Output[FieldsContainer]: r"""Allows to get component_strain_energy output of the operator Returns @@ -617,7 +625,7 @@ def component_strain_energy(self) -> Output: return self._component_strain_energy 
@property - def component_strain_energy_percentage(self) -> Output: + def component_strain_energy_percentage(self) -> Output[FieldsContainer]: r"""Allows to get component_strain_energy_percentage output of the operator Returns @@ -635,7 +643,7 @@ def component_strain_energy_percentage(self) -> Output: return self._component_strain_energy_percentage @property - def component_kinetic_energy(self) -> Output: + def component_kinetic_energy(self) -> Output[FieldsContainer]: r"""Allows to get component_kinetic_energy output of the operator Returns @@ -653,7 +661,7 @@ def component_kinetic_energy(self) -> Output: return self._component_kinetic_energy @property - def component_kinetic_energy_percentage(self) -> Output: + def component_kinetic_energy_percentage(self) -> Output[FieldsContainer]: r"""Allows to get component_kinetic_energy_percentage output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/workflow_energy_per_harmonic.py b/src/ansys/dpf/core/operators/result/workflow_energy_per_harmonic.py index 54a8f47c40d..dd7afdc8eab 100644 --- a/src/ansys/dpf/core/operators/result/workflow_energy_per_harmonic.py +++ b/src/ansys/dpf/core/operators/result/workflow_energy_per_harmonic.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class workflow_energy_per_harmonic(Operator): r"""Calculates the cumulated energy per harmonic in a multistage @@ -221,29 +227,29 @@ class InputsWorkflowEnergyPerHarmonic(_Inputs): def __init__(self, op: Operator): super().__init__(workflow_energy_per_harmonic._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping] = Input( workflow_energy_per_harmonic._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( workflow_energy_per_harmonic._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._energy_type = Input( + self._energy_type: Input[int] = Input( workflow_energy_per_harmonic._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._energy_type) - self._stream = Input( + self._stream: Input = Input( workflow_energy_per_harmonic._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._stream) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( workflow_energy_per_harmonic._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. Returns @@ -262,7 +268,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Master scoping. All harmonics will be intersected with this scoping. @@ -283,7 +289,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def energy_type(self) -> Input: + def energy_type(self) -> Input[int]: r"""Allows to connect energy_type input to the operator. 
Type of energy to be processed: (0: Strain + Kinetic energy (default), 1: Strain energy, 2: Kinetic energy) @@ -323,7 +329,7 @@ def stream(self) -> Input: return self._stream @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -357,17 +363,17 @@ class OutputsWorkflowEnergyPerHarmonic(_Outputs): def __init__(self, op: Operator): super().__init__(workflow_energy_per_harmonic._spec().outputs, op) - self._harmonic_energy = Output( + self._harmonic_energy: Output[FieldsContainer] = Output( workflow_energy_per_harmonic._spec().output_pin(0), 0, op ) self._outputs.append(self._harmonic_energy) - self._harmonic_energy_percentage = Output( + self._harmonic_energy_percentage: Output[FieldsContainer] = Output( workflow_energy_per_harmonic._spec().output_pin(1), 1, op ) self._outputs.append(self._harmonic_energy_percentage) @property - def harmonic_energy(self) -> Output: + def harmonic_energy(self) -> Output[FieldsContainer]: r"""Allows to get harmonic_energy output of the operator Returns @@ -385,7 +391,7 @@ def harmonic_energy(self) -> Output: return self._harmonic_energy @property - def harmonic_energy_percentage(self) -> Output: + def harmonic_energy_percentage(self) -> Output[FieldsContainer]: r"""Allows to get harmonic_energy_percentage output of the operator Returns diff --git a/src/ansys/dpf/core/operators/result/write_cms_rbd_file.py b/src/ansys/dpf/core/operators/result/write_cms_rbd_file.py index 0caa2f8e82c..da82148aaef 100644 --- a/src/ansys/dpf/core/operators/result/write_cms_rbd_file.py +++ b/src/ansys/dpf/core/operators/result/write_cms_rbd_file.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.property_field import PropertyField + class write_cms_rbd_file(Operator): r"""Write the invariant terms and the model data in a cms_rbd file @@ -497,59 +503,81 @@ class InputsWriteCmsRbdFile(_Inputs): def __init__(self, op: Operator): super().__init__(write_cms_rbd_file._spec().inputs, op) - self._model_data = Input(write_cms_rbd_file._spec().input_pin(0), 0, op, -1) + self._model_data: Input[PropertyField] = Input( + write_cms_rbd_file._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._model_data) - self._center_of_mass = Input(write_cms_rbd_file._spec().input_pin(1), 1, op, -1) + self._center_of_mass: Input[Field] = Input( + write_cms_rbd_file._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._center_of_mass) - self._inertia_relief = Input(write_cms_rbd_file._spec().input_pin(2), 2, op, -1) + self._inertia_relief: Input[Field] = Input( + write_cms_rbd_file._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._inertia_relief) - self._model_size = Input(write_cms_rbd_file._spec().input_pin(3), 3, op, -1) + self._model_size: Input[float] = Input( + write_cms_rbd_file._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._model_size) - self._master_node_coordinates = Input( + self._master_node_coordinates: Input = Input( write_cms_rbd_file._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._master_node_coordinates) - self._v_trsf = Input(write_cms_rbd_file._spec().input_pin(5), 5, op, 
-1) + self._v_trsf: Input = Input(write_cms_rbd_file._spec().input_pin(5), 5, op, -1) self._inputs.append(self._v_trsf) - self._k_mat = Input(write_cms_rbd_file._spec().input_pin(6), 6, op, -1) + self._k_mat: Input[Field] = Input( + write_cms_rbd_file._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._k_mat) - self._mass_mat = Input(write_cms_rbd_file._spec().input_pin(7), 7, op, -1) + self._mass_mat: Input[Field] = Input( + write_cms_rbd_file._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mass_mat) - self._c_mat = Input(write_cms_rbd_file._spec().input_pin(8), 8, op, -1) + self._c_mat: Input[Field] = Input( + write_cms_rbd_file._spec().input_pin(8), 8, op, -1 + ) self._inputs.append(self._c_mat) - self._rhs = Input(write_cms_rbd_file._spec().input_pin(9), 9, op, -1) + self._rhs: Input[Field] = Input( + write_cms_rbd_file._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._rhs) - self._dn = Input(write_cms_rbd_file._spec().input_pin(10), 10, op, -1) + self._dn: Input = Input(write_cms_rbd_file._spec().input_pin(10), 10, op, -1) self._inputs.append(self._dn) - self._dr_cross_n = Input(write_cms_rbd_file._spec().input_pin(11), 11, op, -1) + self._dr_cross_n: Input = Input( + write_cms_rbd_file._spec().input_pin(11), 11, op, -1 + ) self._inputs.append(self._dr_cross_n) - self._drn = Input(write_cms_rbd_file._spec().input_pin(12), 12, op, -1) + self._drn: Input = Input(write_cms_rbd_file._spec().input_pin(12), 12, op, -1) self._inputs.append(self._drn) - self._dn_cross_n = Input(write_cms_rbd_file._spec().input_pin(13), 13, op, -1) + self._dn_cross_n: Input = Input( + write_cms_rbd_file._spec().input_pin(13), 13, op, -1 + ) self._inputs.append(self._dn_cross_n) - self._dnx_y = Input(write_cms_rbd_file._spec().input_pin(14), 14, op, -1) + self._dnx_y: Input = Input(write_cms_rbd_file._spec().input_pin(14), 14, op, -1) self._inputs.append(self._dnx_y) - self._dny_y = Input(write_cms_rbd_file._spec().input_pin(15), 15, op, -1) + self._dny_y: Input = Input(write_cms_rbd_file._spec().input_pin(15), 15, op, -1) self._inputs.append(self._dny_y) - self._dnz_y = Input(write_cms_rbd_file._spec().input_pin(16), 16, op, -1) + self._dnz_y: Input = Input(write_cms_rbd_file._spec().input_pin(16), 16, op, -1) self._inputs.append(self._dnz_y) - self._dyx_n = Input(write_cms_rbd_file._spec().input_pin(17), 17, op, -1) + self._dyx_n: Input = Input(write_cms_rbd_file._spec().input_pin(17), 17, op, -1) self._inputs.append(self._dyx_n) - self._dyy_n = Input(write_cms_rbd_file._spec().input_pin(18), 18, op, -1) + self._dyy_n: Input = Input(write_cms_rbd_file._spec().input_pin(18), 18, op, -1) self._inputs.append(self._dyy_n) - self._dyz_n = Input(write_cms_rbd_file._spec().input_pin(19), 19, op, -1) + self._dyz_n: Input = Input(write_cms_rbd_file._spec().input_pin(19), 19, op, -1) self._inputs.append(self._dyz_n) - self._dnxn = Input(write_cms_rbd_file._spec().input_pin(20), 20, op, -1) + self._dnxn: Input = Input(write_cms_rbd_file._spec().input_pin(20), 20, op, -1) self._inputs.append(self._dnxn) - self._dnyn = Input(write_cms_rbd_file._spec().input_pin(21), 21, op, -1) + self._dnyn: Input = Input(write_cms_rbd_file._spec().input_pin(21), 21, op, -1) self._inputs.append(self._dnyn) - self._dnzn = Input(write_cms_rbd_file._spec().input_pin(22), 22, op, -1) + self._dnzn: Input = Input(write_cms_rbd_file._spec().input_pin(22), 22, op, -1) self._inputs.append(self._dnzn) - self._file_path = Input(write_cms_rbd_file._spec().input_pin(23), 23, op, -1) + self._file_path: Input[str] = 
Input( + write_cms_rbd_file._spec().input_pin(23), 23, op, -1 + ) self._inputs.append(self._file_path) @property - def model_data(self) -> Input: + def model_data(self) -> Input[PropertyField]: r"""Allows to connect model_data input to the operator. data describing the finite element model @@ -570,7 +598,7 @@ def model_data(self) -> Input: return self._model_data @property - def center_of_mass(self) -> Input: + def center_of_mass(self) -> Input[Field]: r"""Allows to connect center_of_mass input to the operator. center of mass of the body @@ -591,7 +619,7 @@ def center_of_mass(self) -> Input: return self._center_of_mass @property - def inertia_relief(self) -> Input: + def inertia_relief(self) -> Input[Field]: r"""Allows to connect inertia_relief input to the operator. inertia matrix @@ -612,7 +640,7 @@ def inertia_relief(self) -> Input: return self._inertia_relief @property - def model_size(self) -> Input: + def model_size(self) -> Input[float]: r"""Allows to connect model_size input to the operator. size of the diagonal box containing the body @@ -673,7 +701,7 @@ def v_trsf(self) -> Input: return self._v_trsf @property - def k_mat(self) -> Input: + def k_mat(self) -> Input[Field]: r"""Allows to connect k_mat input to the operator. Returns @@ -692,7 +720,7 @@ def k_mat(self) -> Input: return self._k_mat @property - def mass_mat(self) -> Input: + def mass_mat(self) -> Input[Field]: r"""Allows to connect mass_mat input to the operator. Returns @@ -711,7 +739,7 @@ def mass_mat(self) -> Input: return self._mass_mat @property - def c_mat(self) -> Input: + def c_mat(self) -> Input[Field]: r"""Allows to connect c_mat input to the operator. Returns @@ -730,7 +758,7 @@ def c_mat(self) -> Input: return self._c_mat @property - def rhs(self) -> Input: + def rhs(self) -> Input[Field]: r"""Allows to connect rhs input to the operator. Returns @@ -996,7 +1024,7 @@ def dnzn(self) -> Input: return self._dnzn @property - def file_path(self) -> Input: + def file_path(self) -> Input[str]: r"""Allows to connect file_path input to the operator. path with cms_rbd extension where the export occurs @@ -1031,11 +1059,13 @@ class OutputsWriteCmsRbdFile(_Outputs): def __init__(self, op: Operator): super().__init__(write_cms_rbd_file._spec().outputs, op) - self._data_sources = Output(write_cms_rbd_file._spec().output_pin(0), 0, op) + self._data_sources: Output[DataSources] = Output( + write_cms_rbd_file._spec().output_pin(0), 0, op + ) self._outputs.append(self._data_sources) @property - def data_sources(self) -> Output: + def data_sources(self) -> Output[DataSources]: r"""Allows to get data_sources output of the operator Data Source containing the cms_rbd file generated. 
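The hunks above give the result operators explicit pin types end to end. A minimal call-site sketch using velocity_Z (typed above), assuming a reachable DPF server and a placeholder result file name ("model.rst" is an assumption, not part of this change set):

    # Sketch only: server startup and the result file are assumptions, not part of this diff.
    from ansys.dpf.core import DataSources
    from ansys.dpf.core.operators.result.velocity_Z import velocity_Z

    op = velocity_Z()
    op.inputs.data_sources.connect(DataSources("model.rst"))  # Input[DataSources]
    op.inputs.read_cyclic.connect(1)                          # Input[int] (pin 14 above)
    fc_out = op.outputs.fields_container                      # Output[FieldsContainer]

With the annotations in place, a type checker can flag mismatched direct connections (for example, passing a string to read_cyclic) instead of deferring the error to the server.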
diff --git a/src/ansys/dpf/core/operators/result/write_motion_dfmf_file.py b/src/ansys/dpf/core/operators/result/write_motion_dfmf_file.py index 98cf565f845..67700aec932 100644 --- a/src/ansys/dpf/core/operators/result/write_motion_dfmf_file.py +++ b/src/ansys/dpf/core/operators/result/write_motion_dfmf_file.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.property_field import PropertyField + class write_motion_dfmf_file(Operator): r"""Write the invariant terms and the model data in a motion dfmf file @@ -421,63 +428,83 @@ class InputsWriteMotionDfmfFile(_Inputs): def __init__(self, op: Operator): super().__init__(write_motion_dfmf_file._spec().inputs, op) - self._model_data = Input(write_motion_dfmf_file._spec().input_pin(0), 0, op, -1) + self._model_data: Input[PropertyField] = Input( + write_motion_dfmf_file._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._model_data) - self._mode_shapes = Input( + self._mode_shapes: Input[FieldsContainer] = Input( write_motion_dfmf_file._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mode_shapes) - self._lumped_mass = Input( + self._lumped_mass: Input[FieldsContainer] = Input( write_motion_dfmf_file._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._lumped_mass) - self._field_coordinates = Input( + self._field_coordinates: Input[Field] = Input( write_motion_dfmf_file._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._field_coordinates) - self._nod = Input(write_motion_dfmf_file._spec().input_pin(4), 4, op, -1) + self._nod: Input = Input(write_motion_dfmf_file._spec().input_pin(4), 4, op, -1) self._inputs.append(self._nod) - self._used_node_index = Input( + self._used_node_index: Input = Input( write_motion_dfmf_file._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._used_node_index) - self._eigenvalue = Input(write_motion_dfmf_file._spec().input_pin(6), 6, op, -1) + self._eigenvalue: Input = Input( + write_motion_dfmf_file._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._eigenvalue) - self._translational_mode_shape = Input( + self._translational_mode_shape: Input = Input( write_motion_dfmf_file._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._translational_mode_shape) - self._rotational_mode_shape = Input( + self._rotational_mode_shape: Input = Input( write_motion_dfmf_file._spec().input_pin(8), 8, op, -1 ) self._inputs.append(self._rotational_mode_shape) - self._invrt_1 = Input(write_motion_dfmf_file._spec().input_pin(9), 9, op, -1) + self._invrt_1: Input[float] = Input( + write_motion_dfmf_file._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._invrt_1) - self._invrt_2 = Input(write_motion_dfmf_file._spec().input_pin(10), 10, op, -1) + self._invrt_2: Input = Input( + write_motion_dfmf_file._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._invrt_2) - self._invrt_3 = Input(write_motion_dfmf_file._spec().input_pin(11), 11, op, -1) + self._invrt_3: Input = Input( + write_motion_dfmf_file._spec().input_pin(11), 11, op, -1 + ) self._inputs.append(self._invrt_3) - self._invrt_4 = Input(write_motion_dfmf_file._spec().input_pin(12), 12, 
op, -1) + self._invrt_4: Input = Input( + write_motion_dfmf_file._spec().input_pin(12), 12, op, -1 + ) self._inputs.append(self._invrt_4) - self._invrt_5 = Input(write_motion_dfmf_file._spec().input_pin(13), 13, op, -1) + self._invrt_5: Input = Input( + write_motion_dfmf_file._spec().input_pin(13), 13, op, -1 + ) self._inputs.append(self._invrt_5) - self._invrt_6 = Input(write_motion_dfmf_file._spec().input_pin(14), 14, op, -1) + self._invrt_6: Input = Input( + write_motion_dfmf_file._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._invrt_6) - self._invrt_7 = Input(write_motion_dfmf_file._spec().input_pin(15), 15, op, -1) + self._invrt_7: Input = Input( + write_motion_dfmf_file._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._invrt_7) - self._invrt_8 = Input(write_motion_dfmf_file._spec().input_pin(16), 16, op, -1) + self._invrt_8: Input = Input( + write_motion_dfmf_file._spec().input_pin(16), 16, op, -1 + ) self._inputs.append(self._invrt_8) - self._dfmffile_path = Input( + self._dfmffile_path: Input[str] = Input( write_motion_dfmf_file._spec().input_pin(17), 17, op, -1 ) self._inputs.append(self._dfmffile_path) - self._rstfile_path = Input( + self._rstfile_path: Input[str] = Input( write_motion_dfmf_file._spec().input_pin(18), 18, op, -1 ) self._inputs.append(self._rstfile_path) @property - def model_data(self) -> Input: + def model_data(self) -> Input[PropertyField]: r"""Allows to connect model_data input to the operator. data describing the finite element model @@ -498,7 +525,7 @@ def model_data(self) -> Input: return self._model_data @property - def mode_shapes(self) -> Input: + def mode_shapes(self) -> Input[FieldsContainer]: r"""Allows to connect mode_shapes input to the operator. FieldsContainers containing the mode shapes, which are CST and NOR for the cms method @@ -519,7 +546,7 @@ def mode_shapes(self) -> Input: return self._mode_shapes @property - def lumped_mass(self) -> Input: + def lumped_mass(self) -> Input[FieldsContainer]: r"""Allows to connect lumped_mass input to the operator. FieldsContainers containing the lumped mass @@ -540,7 +567,7 @@ def lumped_mass(self) -> Input: return self._lumped_mass @property - def field_coordinates(self) -> Input: + def field_coordinates(self) -> Input[Field]: r"""Allows to connect field_coordinates input to the operator. coordinates of all nodes @@ -656,7 +683,7 @@ def rotational_mode_shape(self) -> Input: return self._rotational_mode_shape @property - def invrt_1(self) -> Input: + def invrt_1(self) -> Input[float]: r"""Allows to connect invrt_1 input to the operator. Returns @@ -808,7 +835,7 @@ def invrt_8(self) -> Input: return self._invrt_8 @property - def dfmffile_path(self) -> Input: + def dfmffile_path(self) -> Input[str]: r"""Allows to connect dfmffile_path input to the operator. path with motion dfmf extension where the export occurs @@ -829,7 +856,7 @@ def dfmffile_path(self) -> Input: return self._dfmffile_path @property - def rstfile_path(self) -> Input: + def rstfile_path(self) -> Input[str]: r"""Allows to connect rstfile_path input to the operator. 
Returns @@ -862,13 +889,13 @@ class OutputsWriteMotionDfmfFile(_Outputs): def __init__(self, op: Operator): super().__init__(write_motion_dfmf_file._spec().outputs, op) - self._dfmf_data_source = Output( + self._dfmf_data_source: Output[DataSources] = Output( write_motion_dfmf_file._spec().output_pin(0), 0, op ) self._outputs.append(self._dfmf_data_source) @property - def dfmf_data_source(self) -> Output: + def dfmf_data_source(self) -> Output[DataSources]: r"""Allows to get dfmf_data_source output of the operator Data Source containing the dfmf file generated. diff --git a/src/ansys/dpf/core/operators/result/y_plus.py b/src/ansys/dpf/core/operators/result/y_plus.py index d4e3a72d1f6..04d423e1f9e 100644 --- a/src/ansys/dpf/core/operators/result/y_plus.py +++ b/src/ansys/dpf/core/operators/result/y_plus.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,16 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class y_plus(Operator): r"""Read Y Plus (y+) by calling the readers defined by the datasources. @@ -265,25 +276,41 @@ class InputsYPlus(_Inputs): def __init__(self, op: Operator): super().__init__(y_plus._spec().inputs, op) - self._time_scoping = Input(y_plus._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping | int | float | Field] = Input( + y_plus._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(y_plus._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[ScopingsContainer | Scoping] = Input( + y_plus._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams_container = Input(y_plus._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + y_plus._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(y_plus._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + y_plus._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(y_plus._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + y_plus._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._region_scoping = Input(y_plus._spec().input_pin(25), 25, op, -1) + self._region_scoping: Input[Scoping | int] = Input( + y_plus._spec().input_pin(25), 25, op, -1 + ) self._inputs.append(self._region_scoping) - self._qualifiers1 = Input(y_plus._spec().input_pin(1000), 1000, op, 0) + self._qualifiers1: Input[dict] = Input( + y_plus._spec().input_pin(1000), 1000, op, 0 + ) self._inputs.append(self._qualifiers1) - self._qualifiers2 = Input(y_plus._spec().input_pin(1001), 1001, op, 1) + self._qualifiers2: Input[dict] = Input( + y_plus._spec().input_pin(1001), 1001, op, 1 + ) self._inputs.append(self._qualifiers2) @property - def time_scoping(self) -> Input: + def time_scoping(self) 
-> Input[Scoping | int | float | Field]: r"""Allows to connect time_scoping input to the operator. time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. To get all data for all time/freq sets, connect an int with value -1. @@ -304,7 +331,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[ScopingsContainer | Scoping]: r"""Allows to connect mesh_scoping input to the operator. nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains @@ -325,7 +352,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -346,7 +373,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -367,7 +394,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. prevents from reading the mesh in the result files @@ -388,7 +415,7 @@ def mesh(self) -> Input: return self._mesh @property - def region_scoping(self) -> Input: + def region_scoping(self) -> Input[Scoping | int]: r"""Allows to connect region_scoping input to the operator. region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results). @@ -409,7 +436,7 @@ def region_scoping(self) -> Input: return self._region_scoping @property - def qualifiers1(self) -> Input: + def qualifiers1(self) -> Input[dict]: r"""Allows to connect qualifiers1 input to the operator. (for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -430,7 +457,7 @@ def qualifiers1(self) -> Input: return self._qualifiers1 @property - def qualifiers2(self) -> Input: + def qualifiers2(self) -> Input[dict]: r"""Allows to connect qualifiers2 input to the operator. 
(for Fluid results only) LabelSpace with combination of zone, phases or species ids @@ -465,11 +492,13 @@ class OutputsYPlus(_Outputs): def __init__(self, op: Operator): super().__init__(y_plus._spec().outputs, op) - self._fields_container = Output(y_plus._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + y_plus._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/scoping/adapt_with_scopings_container.py b/src/ansys/dpf/core/operators/scoping/adapt_with_scopings_container.py index 3a7571910a1..2cd7922a2db 100644 --- a/src/ansys/dpf/core/operators/scoping/adapt_with_scopings_container.py +++ b/src/ansys/dpf/core/operators/scoping/adapt_with_scopings_container.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scopings_container import ScopingsContainer + class adapt_with_scopings_container(Operator): r"""Rescopes/splits a fields container to correspond to a scopings @@ -178,21 +184,21 @@ class InputsAdaptWithScopingsContainer(_Inputs): def __init__(self, op: Operator): super().__init__(adapt_with_scopings_container._spec().inputs, op) - self._field_or_fields_container = Input( + self._field_or_fields_container: Input[FieldsContainer | Field] = Input( adapt_with_scopings_container._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._field_or_fields_container) - self._scopings_container = Input( + self._scopings_container: Input[ScopingsContainer] = Input( adapt_with_scopings_container._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._scopings_container) - self._keep_empty_fields = Input( + self._keep_empty_fields: Input[bool] = Input( adapt_with_scopings_container._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._keep_empty_fields) @property - def field_or_fields_container(self) -> Input: + def field_or_fields_container(self) -> Input[FieldsContainer | Field]: r"""Allows to connect field_or_fields_container input to the operator. Returns @@ -211,7 +217,7 @@ def field_or_fields_container(self) -> Input: return self._field_or_fields_container @property - def scopings_container(self) -> Input: + def scopings_container(self) -> Input[ScopingsContainer]: r"""Allows to connect scopings_container input to the operator. Returns @@ -230,7 +236,7 @@ def scopings_container(self) -> Input: return self._scopings_container @property - def keep_empty_fields(self) -> Input: + def keep_empty_fields(self) -> Input[bool]: r"""Allows to connect keep_empty_fields input to the operator. Default false. 
@@ -265,13 +271,13 @@ class OutputsAdaptWithScopingsContainer(_Outputs): def __init__(self, op: Operator): super().__init__(adapt_with_scopings_container._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( adapt_with_scopings_container._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/scoping/change_fc.py b/src/ansys/dpf/core/operators/scoping/change_fc.py index 68f0c3807f9..003daa087c0 100644 --- a/src/ansys/dpf/core/operators/scoping/change_fc.py +++ b/src/ansys/dpf/core/operators/scoping/change_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scopings_container import ScopingsContainer + class change_fc(Operator): r"""DEPRECATED, PLEASE USE ADAPT WITH SCOPINGS CONTAINER. Rescopes/splits a @@ -162,15 +168,17 @@ class InputsChangeFc(_Inputs): def __init__(self, op: Operator): super().__init__(change_fc._spec().inputs, op) - self._field_or_fields_container = Input( + self._field_or_fields_container: Input[FieldsContainer | Field] = Input( change_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._field_or_fields_container) - self._scopings_container = Input(change_fc._spec().input_pin(1), 1, op, -1) + self._scopings_container: Input[ScopingsContainer] = Input( + change_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scopings_container) @property - def field_or_fields_container(self) -> Input: + def field_or_fields_container(self) -> Input[FieldsContainer | Field]: r"""Allows to connect field_or_fields_container input to the operator. Returns @@ -189,7 +197,7 @@ def field_or_fields_container(self) -> Input: return self._field_or_fields_container @property - def scopings_container(self) -> Input: + def scopings_container(self) -> Input[ScopingsContainer]: r"""Allows to connect scopings_container input to the operator. 
Returns @@ -222,11 +230,13 @@ class OutputsChangeFc(_Outputs): def __init__(self, op: Operator): super().__init__(change_fc._spec().outputs, op) - self._fields_container = Output(change_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + change_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/scoping/compute_element_centroids.py b/src/ansys/dpf/core/operators/scoping/compute_element_centroids.py index ab1b442b215..8fb1d8617cd 100644 --- a/src/ansys/dpf/core/operators/scoping/compute_element_centroids.py +++ b/src/ansys/dpf/core/operators/scoping/compute_element_centroids.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class compute_element_centroids(Operator): r"""Computes the element centroids of the mesh. It also outputs the element @@ -168,15 +174,17 @@ class InputsComputeElementCentroids(_Inputs): def __init__(self, op: Operator): super().__init__(compute_element_centroids._spec().inputs, op) - self._element_scoping = Input( + self._element_scoping: Input[Scoping] = Input( compute_element_centroids._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._element_scoping) - self._mesh = Input(compute_element_centroids._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + compute_element_centroids._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def element_scoping(self) -> Input: + def element_scoping(self) -> Input[Scoping]: r"""Allows to connect element_scoping input to the operator. If provided, only the centroids of the elements in the scoping are computed. @@ -197,7 +205,7 @@ def element_scoping(self) -> Input: return self._element_scoping @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Mesh to compute centroids @@ -233,13 +241,17 @@ class OutputsComputeElementCentroids(_Outputs): def __init__(self, op: Operator): super().__init__(compute_element_centroids._spec().outputs, op) - self._centroids = Output(compute_element_centroids._spec().output_pin(0), 0, op) + self._centroids: Output[Field] = Output( + compute_element_centroids._spec().output_pin(0), 0, op + ) self._outputs.append(self._centroids) - self._measure = Output(compute_element_centroids._spec().output_pin(1), 1, op) + self._measure: Output[Field] = Output( + compute_element_centroids._spec().output_pin(1), 1, op + ) self._outputs.append(self._measure) @property - def centroids(self) -> Output: + def centroids(self) -> Output[Field]: r"""Allows to get centroids output of the operator element centroids. @@ -259,7 +271,7 @@ def centroids(self) -> Output: return self._centroids @property - def measure(self) -> Output: + def measure(self) -> Output[Field]: r"""Allows to get measure output of the operator element measure (length, surface or volume depending on the dimension of the element). 
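The scoping operators follow the same pattern; compute_element_centroids just above now carries Input[MeshedRegion]/Input[Scoping] pins and Output[Field] results. A short sketch under the same assumptions as before (live server, placeholder result file to obtain a mesh):

    # Sketch only: "model.rst" is a placeholder result file, as in the earlier sketch.
    from ansys.dpf.core import Model
    from ansys.dpf.core.operators.scoping.compute_element_centroids import (
        compute_element_centroids,
    )

    mesh = Model("model.rst").metadata.meshed_region

    centroids_op = compute_element_centroids()
    centroids_op.inputs.mesh.connect(mesh)       # Input[MeshedRegion]
    centroids = centroids_op.outputs.centroids   # Output[Field]: element centroids
    measure = centroids_op.outputs.measure       # Output[Field]: element length/surface/volume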
diff --git a/src/ansys/dpf/core/operators/scoping/connectivity_ids.py b/src/ansys/dpf/core/operators/scoping/connectivity_ids.py index 28a3e7a26b6..0efe3640f2b 100644 --- a/src/ansys/dpf/core/operators/scoping/connectivity_ids.py +++ b/src/ansys/dpf/core/operators/scoping/connectivity_ids.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class connectivity_ids(Operator): r"""Returns the ordered node ids corresponding to the element ids scoping in @@ -189,15 +194,21 @@ class InputsConnectivityIds(_Inputs): def __init__(self, op: Operator): super().__init__(connectivity_ids._spec().inputs, op) - self._mesh_scoping = Input(connectivity_ids._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + connectivity_ids._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._mesh = Input(connectivity_ids._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + connectivity_ids._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._take_mid_nodes = Input(connectivity_ids._spec().input_pin(10), 10, op, -1) + self._take_mid_nodes: Input[bool] = Input( + connectivity_ids._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._take_mid_nodes) @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Elemental scoping @@ -218,7 +229,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. the support of the scoping is expected if there is no mesh in input @@ -239,7 +250,7 @@ def mesh(self) -> Input: return self._mesh @property - def take_mid_nodes(self) -> Input: + def take_mid_nodes(self) -> Input[bool]: r"""Allows to connect take_mid_nodes input to the operator. 
default is true @@ -275,13 +286,17 @@ class OutputsConnectivityIds(_Outputs): def __init__(self, op: Operator): super().__init__(connectivity_ids._spec().outputs, op) - self._mesh_scoping = Output(connectivity_ids._spec().output_pin(0), 0, op) + self._mesh_scoping: Output[Scoping] = Output( + connectivity_ids._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh_scoping) - self._elemental_scoping = Output(connectivity_ids._spec().output_pin(1), 1, op) + self._elemental_scoping: Output[Scoping] = Output( + connectivity_ids._spec().output_pin(1), 1, op + ) self._outputs.append(self._elemental_scoping) @property - def mesh_scoping(self) -> Output: + def mesh_scoping(self) -> Output[Scoping]: r"""Allows to get mesh_scoping output of the operator Returns @@ -299,7 +314,7 @@ def mesh_scoping(self) -> Output: return self._mesh_scoping @property - def elemental_scoping(self) -> Output: + def elemental_scoping(self) -> Output[Scoping]: r"""Allows to get elemental_scoping output of the operator same as the input scoping but with ids duplicated to have the same size as nodal output scoping diff --git a/src/ansys/dpf/core/operators/scoping/elemental_from_mesh.py b/src/ansys/dpf/core/operators/scoping/elemental_from_mesh.py index 888a3e46e47..99c0eb78fd1 100644 --- a/src/ansys/dpf/core/operators/scoping/elemental_from_mesh.py +++ b/src/ansys/dpf/core/operators/scoping/elemental_from_mesh.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class elemental_from_mesh(Operator): r"""Retrieves the elemental scoping of a given input mesh, which contains @@ -142,11 +147,13 @@ class InputsElementalFromMesh(_Inputs): def __init__(self, op: Operator): super().__init__(elemental_from_mesh._spec().inputs, op) - self._mesh = Input(elemental_from_mesh._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + elemental_from_mesh._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. 
Returns @@ -179,11 +186,13 @@ class OutputsElementalFromMesh(_Outputs): def __init__(self, op: Operator): super().__init__(elemental_from_mesh._spec().outputs, op) - self._mesh_scoping = Output(elemental_from_mesh._spec().output_pin(0), 0, op) + self._mesh_scoping: Output[Scoping] = Output( + elemental_from_mesh._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh_scoping) @property - def mesh_scoping(self) -> Output: + def mesh_scoping(self) -> Output[Scoping]: r"""Allows to get mesh_scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/scoping/from_mesh.py b/src/ansys/dpf/core/operators/scoping/from_mesh.py index 46832884549..3a065243d1f 100644 --- a/src/ansys/dpf/core/operators/scoping/from_mesh.py +++ b/src/ansys/dpf/core/operators/scoping/from_mesh.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class from_mesh(Operator): r"""Provides the entire mesh scoping based on the requested location @@ -155,13 +160,17 @@ class InputsFromMesh(_Inputs): def __init__(self, op: Operator): super().__init__(from_mesh._spec().inputs, op) - self._mesh = Input(from_mesh._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + from_mesh._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(from_mesh._spec().input_pin(1), 1, op, -1) + self._requested_location: Input[str] = Input( + from_mesh._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._requested_location) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -180,7 +189,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
if nothing the operator returns the nodes scoping, possible locations are: Nodal(default) or Elemental @@ -215,11 +224,11 @@ class OutputsFromMesh(_Outputs): def __init__(self, op: Operator): super().__init__(from_mesh._spec().outputs, op) - self._scoping = Output(from_mesh._spec().output_pin(0), 0, op) + self._scoping: Output[Scoping] = Output(from_mesh._spec().output_pin(0), 0, op) self._outputs.append(self._scoping) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/scoping/intersect.py b/src/ansys/dpf/core/operators/scoping/intersect.py index c29ff2b2e5f..5cab42946c0 100644 --- a/src/ansys/dpf/core/operators/scoping/intersect.py +++ b/src/ansys/dpf/core/operators/scoping/intersect.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.scoping import Scoping + class intersect(Operator): r"""Intersect 2 scopings and return the intersection and the difference @@ -164,13 +168,17 @@ class InputsIntersect(_Inputs): def __init__(self, op: Operator): super().__init__(intersect._spec().inputs, op) - self._scopingA = Input(intersect._spec().input_pin(0), 0, op, -1) + self._scopingA: Input[Scoping] = Input( + intersect._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._scopingA) - self._scopingB = Input(intersect._spec().input_pin(1), 1, op, -1) + self._scopingB: Input[Scoping] = Input( + intersect._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scopingB) @property - def scopingA(self) -> Input: + def scopingA(self) -> Input[Scoping]: r"""Allows to connect scopingA input to the operator. Returns @@ -189,7 +197,7 @@ def scopingA(self) -> Input: return self._scopingA @property - def scopingB(self) -> Input: + def scopingB(self) -> Input[Scoping]: r"""Allows to connect scopingB input to the operator. 
Returns @@ -223,13 +231,17 @@ class OutputsIntersect(_Outputs): def __init__(self, op: Operator): super().__init__(intersect._spec().outputs, op) - self._intersection = Output(intersect._spec().output_pin(0), 0, op) + self._intersection: Output[Scoping] = Output( + intersect._spec().output_pin(0), 0, op + ) self._outputs.append(self._intersection) - self._scopingA_min_intersection = Output(intersect._spec().output_pin(1), 1, op) + self._scopingA_min_intersection: Output[Scoping] = Output( + intersect._spec().output_pin(1), 1, op + ) self._outputs.append(self._scopingA_min_intersection) @property - def intersection(self) -> Output: + def intersection(self) -> Output[Scoping]: r"""Allows to get intersection output of the operator Returns @@ -247,7 +259,7 @@ def intersection(self) -> Output: return self._intersection @property - def scopingA_min_intersection(self) -> Output: + def scopingA_min_intersection(self) -> Output[Scoping]: r"""Allows to get scopingA_min_intersection output of the operator Returns diff --git a/src/ansys/dpf/core/operators/scoping/nodal_from_mesh.py b/src/ansys/dpf/core/operators/scoping/nodal_from_mesh.py index 419b719a2db..04669fd8bcd 100644 --- a/src/ansys/dpf/core/operators/scoping/nodal_from_mesh.py +++ b/src/ansys/dpf/core/operators/scoping/nodal_from_mesh.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class nodal_from_mesh(Operator): r"""Retrieves the nodal scoping of a given input mesh, which contains the @@ -142,11 +147,13 @@ class InputsNodalFromMesh(_Inputs): def __init__(self, op: Operator): super().__init__(nodal_from_mesh._spec().inputs, op) - self._mesh = Input(nodal_from_mesh._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion] = Input( + nodal_from_mesh._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. 
Returns @@ -179,11 +186,13 @@ class OutputsNodalFromMesh(_Outputs): def __init__(self, op: Operator): super().__init__(nodal_from_mesh._spec().outputs, op) - self._mesh_scoping = Output(nodal_from_mesh._spec().output_pin(0), 0, op) + self._mesh_scoping: Output[Scoping] = Output( + nodal_from_mesh._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh_scoping) @property - def mesh_scoping(self) -> Output: + def mesh_scoping(self) -> Output[Scoping]: r"""Allows to get mesh_scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/scoping/on_mesh_property.py b/src/ansys/dpf/core/operators/scoping/on_mesh_property.py index 01a5311b820..b4195ababc1 100644 --- a/src/ansys/dpf/core/operators/scoping/on_mesh_property.py +++ b/src/ansys/dpf/core/operators/scoping/on_mesh_property.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + class on_mesh_property(Operator): r"""Provides a scoping on a given property name and a property number. @@ -213,21 +218,29 @@ class InputsOnMeshProperty(_Inputs): def __init__(self, op: Operator): super().__init__(on_mesh_property._spec().inputs, op) - self._requested_location = Input( + self._requested_location: Input[str] = Input( on_mesh_property._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._requested_location) - self._property_name = Input(on_mesh_property._spec().input_pin(1), 1, op, -1) + self._property_name: Input[str] = Input( + on_mesh_property._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._property_name) - self._property_id = Input(on_mesh_property._spec().input_pin(2), 2, op, -1) + self._property_id: Input[int] = Input( + on_mesh_property._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._property_id) - self._inclusive = Input(on_mesh_property._spec().input_pin(5), 5, op, -1) + self._inclusive: Input[int] = Input( + on_mesh_property._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._inclusive) - self._mesh = Input(on_mesh_property._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + on_mesh_property._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Nodal or Elemental location are expected @@ -248,7 +261,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. ex "apdl_element_type", "elprops", "mat", "eltype", "connectivity", "shell_elements", "solid_elements", "skin_elements", "beam_elements", "point_elements"... @@ -269,7 +282,7 @@ def property_name(self) -> Input: return self._property_name @property - def property_id(self) -> Input: + def property_id(self) -> Input[int]: r"""Allows to connect property_id input to the operator. Returns @@ -288,7 +301,7 @@ def property_id(self) -> Input: return self._property_id @property - def inclusive(self) -> Input: + def inclusive(self) -> Input[int]: r"""Allows to connect inclusive input to the operator. 
Default is 1 (inclusive is true). Only used if 'shape_values' property is requested. If inclusive is set to 1 and 'elprops' property field is available, it will select all elements that are set on the corresponding property. If inclusive is set to 0 (exclusive) and 'elprops' property field is available, it will select the elements that are only set on this property. @@ -309,7 +322,7 @@ def inclusive(self) -> Input: return self._inclusive @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. Returns @@ -342,11 +355,13 @@ class OutputsOnMeshProperty(_Outputs): def __init__(self, op: Operator): super().__init__(on_mesh_property._spec().outputs, op) - self._mesh_scoping = Output(on_mesh_property._spec().output_pin(0), 0, op) + self._mesh_scoping: Output[Scoping] = Output( + on_mesh_property._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh_scoping) @property - def mesh_scoping(self) -> Output: + def mesh_scoping(self) -> Output[Scoping]: r"""Allows to get mesh_scoping output of the operator Scoping diff --git a/src/ansys/dpf/core/operators/scoping/on_named_selection.py b/src/ansys/dpf/core/operators/scoping/on_named_selection.py index e8f0283349b..9c66379fdac 100644 --- a/src/ansys/dpf/core/operators/scoping/on_named_selection.py +++ b/src/ansys/dpf/core/operators/scoping/on_named_selection.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class on_named_selection(Operator): r"""provides a scoping at a given location based on a given named selection @@ -207,25 +213,29 @@ class InputsOnNamedSelection(_Inputs): def __init__(self, op: Operator): super().__init__(on_named_selection._spec().inputs, op) - self._requested_location = Input( + self._requested_location: Input[str] = Input( on_named_selection._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._requested_location) - self._named_selection_name = Input( + self._named_selection_name: Input[str] = Input( on_named_selection._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._named_selection_name) - self._int_inclusive = Input(on_named_selection._spec().input_pin(2), 2, op, -1) + self._int_inclusive: Input[int] = Input( + on_named_selection._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._int_inclusive) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( on_named_selection._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(on_named_selection._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + on_named_selection._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. 
Returns @@ -244,7 +254,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def named_selection_name(self) -> Input: + def named_selection_name(self) -> Input[str]: r"""Allows to connect named_selection_name input to the operator. the string is expected to be in upper case @@ -265,7 +275,7 @@ def named_selection_name(self) -> Input: return self._named_selection_name @property - def int_inclusive(self) -> Input: + def int_inclusive(self) -> Input[int]: r"""Allows to connect int_inclusive input to the operator. If element scoping is requested on a nodal named selection, if Inclusive == 1 then add all the elements adjacent to the nodes.If Inclusive == 0, only the elements which have all their nodes in the named selection are included @@ -286,7 +296,7 @@ def int_inclusive(self) -> Input: return self._int_inclusive @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -305,7 +315,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -338,11 +348,13 @@ class OutputsOnNamedSelection(_Outputs): def __init__(self, op: Operator): super().__init__(on_named_selection._spec().outputs, op) - self._mesh_scoping = Output(on_named_selection._spec().output_pin(0), 0, op) + self._mesh_scoping: Output[Scoping] = Output( + on_named_selection._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh_scoping) @property - def mesh_scoping(self) -> Output: + def mesh_scoping(self) -> Output[Scoping]: r"""Allows to get mesh_scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/scoping/on_property.py b/src/ansys/dpf/core/operators/scoping/on_property.py index 978b572083d..cf33e3cecac 100644 --- a/src/ansys/dpf/core/operators/scoping/on_property.py +++ b/src/ansys/dpf/core/operators/scoping/on_property.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class on_property(Operator): r"""Provides a scoping at a given location based on a given property name @@ -227,21 +233,31 @@ class InputsOnProperty(_Inputs): def __init__(self, op: Operator): super().__init__(on_property._spec().inputs, op) - self._requested_location = Input(on_property._spec().input_pin(0), 0, op, -1) + self._requested_location: Input[str] = Input( + on_property._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._requested_location) - self._property_name = Input(on_property._spec().input_pin(1), 1, op, -1) + self._property_name: Input[str] = Input( + on_property._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._property_name) - self._property_id = Input(on_property._spec().input_pin(2), 2, op, -1) + self._property_id: Input[int] = Input( + on_property._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._property_id) - self._streams_container = Input(on_property._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] 
= Input( + on_property._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(on_property._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + on_property._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._inclusive = Input(on_property._spec().input_pin(5), 5, op, -1) + self._inclusive: Input[int] = Input(on_property._spec().input_pin(5), 5, op, -1) self._inputs.append(self._inclusive) @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Nodal or Elemental location are expected @@ -262,7 +278,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. ex "mapdl_element_type", "mapdl_element_type_id", "apdl_type_index", "mapdl_type_id", "material", "apdl_section_id", "apdl_real_id", "apdl_esys_id", "shell_axi", "volume_axi"... @@ -283,7 +299,7 @@ def property_name(self) -> Input: return self._property_name @property - def property_id(self) -> Input: + def property_id(self) -> Input[int]: r"""Allows to connect property_id input to the operator. property_id or vector of property ids @@ -304,7 +320,7 @@ def property_id(self) -> Input: return self._property_id @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -323,7 +339,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -342,7 +358,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def inclusive(self) -> Input: + def inclusive(self) -> Input[int]: r"""Allows to connect inclusive input to the operator. 
If element scoping is requested on a nodal named selection, if inclusive == 1 then all the elements/faces adjacent to the nodes/faces ids in input are added, if inclusive == 0, only the elements/faces which have all their nodes/faces in the scoping are included @@ -377,11 +393,13 @@ class OutputsOnProperty(_Outputs): def __init__(self, op: Operator): super().__init__(on_property._spec().outputs, op) - self._mesh_scoping = Output(on_property._spec().output_pin(0), 0, op) + self._mesh_scoping: Output[Scoping] = Output( + on_property._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh_scoping) @property - def mesh_scoping(self) -> Output: + def mesh_scoping(self) -> Output[Scoping]: r"""Allows to get mesh_scoping output of the operator Scoping diff --git a/src/ansys/dpf/core/operators/scoping/reduce_sampling.py b/src/ansys/dpf/core/operators/scoping/reduce_sampling.py index ab342734adf..f37eab00ccb 100644 --- a/src/ansys/dpf/core/operators/scoping/reduce_sampling.py +++ b/src/ansys/dpf/core/operators/scoping/reduce_sampling.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.scoping import Scoping + class reduce_sampling(Operator): r"""Take a scoping and remove half of it’s content. @@ -155,13 +159,17 @@ class InputsReduceSampling(_Inputs): def __init__(self, op: Operator): super().__init__(reduce_sampling._spec().inputs, op) - self._mesh_scoping = Input(reduce_sampling._spec().input_pin(0), 0, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + reduce_sampling._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._denominator = Input(reduce_sampling._spec().input_pin(1), 1, op, -1) + self._denominator: Input[int] = Input( + reduce_sampling._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._denominator) @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -180,7 +188,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def denominator(self) -> Input: + def denominator(self) -> Input[int]: r"""Allows to connect denominator input to the operator. Set the number of time the scoping is reduced (default is 2). Must be integer value above 1. 
@@ -215,11 +223,13 @@ class OutputsReduceSampling(_Outputs): def __init__(self, op: Operator): super().__init__(reduce_sampling._spec().outputs, op) - self._mesh_scoping = Output(reduce_sampling._spec().output_pin(0), 0, op) + self._mesh_scoping: Output[Scoping] = Output( + reduce_sampling._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh_scoping) @property - def mesh_scoping(self) -> Output: + def mesh_scoping(self) -> Output[Scoping]: r"""Allows to get mesh_scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/scoping/rescope.py b/src/ansys/dpf/core/operators/scoping/rescope.py index aedb84b6145..065f5924d81 100644 --- a/src/ansys/dpf/core/operators/scoping/rescope.py +++ b/src/ansys/dpf/core/operators/scoping/rescope.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class rescope(Operator): r"""Rescopes a field on the given scoping. If an ID does not exist in the @@ -179,15 +185,21 @@ class InputsRescope(_Inputs): def __init__(self, op: Operator): super().__init__(rescope._spec().inputs, op) - self._fields = Input(rescope._spec().input_pin(0), 0, op, -1) + self._fields: Input[FieldsContainer | Field] = Input( + rescope._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields) - self._mesh_scoping = Input(rescope._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + rescope._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._default_value = Input(rescope._spec().input_pin(2), 2, op, -1) + self._default_value: Input[float] = Input( + rescope._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._default_value) @property - def fields(self) -> Input: + def fields(self) -> Input[FieldsContainer | Field]: r"""Allows to connect fields input to the operator. Returns @@ -206,7 +218,7 @@ def fields(self) -> Input: return self._fields @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -225,7 +237,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def default_value(self) -> Input: + def default_value(self) -> Input[float]: r"""Allows to connect default_value input to the operator. If pin 2 is used, the IDs not found in the field are added with this default value. 
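A similar sketch for the rescope pins typed above, assuming a running DPF server and the bundled example file; rescoping a displacement field onto its own scoping is used purely for illustration, and Input[float] lets a checker reject a non-numeric default_value.

    from ansys.dpf import core as dpf
    from ansys.dpf.core import examples

    model = dpf.Model(examples.find_static_rst())
    field = model.results.displacement().eval()[0]   # one Field taken from the container

    op = dpf.operators.scoping.rescope()
    op.inputs.fields.connect(field)                  # Input[FieldsContainer | Field]
    op.inputs.mesh_scoping.connect(field.scoping)    # Input[Scoping]
    op.inputs.default_value.connect(0.0)             # Input[float]
    rescoped = op.eval()                             # same type as the connected input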
diff --git a/src/ansys/dpf/core/operators/scoping/rescope_custom_type_field.py b/src/ansys/dpf/core/operators/scoping/rescope_custom_type_field.py index 057b87fbd34..5fea6fe7847 100644 --- a/src/ansys/dpf/core/operators/scoping/rescope_custom_type_field.py +++ b/src/ansys/dpf/core/operators/scoping/rescope_custom_type_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.custom_type_field import CustomTypeField + from ansys.dpf.core.scoping import Scoping + class rescope_custom_type_field(Operator): r"""Rescopes a custom type field on the given scoping. If an ID does not @@ -181,19 +186,21 @@ class InputsRescopeCustomTypeField(_Inputs): def __init__(self, op: Operator): super().__init__(rescope_custom_type_field._spec().inputs, op) - self._fields = Input(rescope_custom_type_field._spec().input_pin(0), 0, op, -1) + self._fields: Input[CustomTypeField] = Input( + rescope_custom_type_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( rescope_custom_type_field._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._default_value = Input( + self._default_value: Input[CustomTypeField] = Input( rescope_custom_type_field._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._default_value) @property - def fields(self) -> Input: + def fields(self) -> Input[CustomTypeField]: r"""Allows to connect fields input to the operator. Returns @@ -212,7 +219,7 @@ def fields(self) -> Input: return self._fields @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -231,7 +238,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def default_value(self) -> Input: + def default_value(self) -> Input[CustomTypeField]: r"""Allows to connect default_value input to the operator. If pin 2 is used, the IDs not found in the custom type field are added with this default value. diff --git a/src/ansys/dpf/core/operators/scoping/rescope_fc.py b/src/ansys/dpf/core/operators/scoping/rescope_fc.py index 72102bb8e31..e65aa67fe4d 100644 --- a/src/ansys/dpf/core/operators/scoping/rescope_fc.py +++ b/src/ansys/dpf/core/operators/scoping/rescope_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class rescope_fc(Operator): r"""Rescopes a field on the given scoping. 
If an ID does not exist in the @@ -178,15 +183,21 @@ class InputsRescopeFc(_Inputs): def __init__(self, op: Operator): super().__init__(rescope_fc._spec().inputs, op) - self._fields_container = Input(rescope_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + rescope_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh_scoping = Input(rescope_fc._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + rescope_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._default_value = Input(rescope_fc._spec().input_pin(2), 2, op, -1) + self._default_value: Input[float] = Input( + rescope_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._default_value) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -205,7 +216,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -224,7 +235,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def default_value(self) -> Input: + def default_value(self) -> Input[float]: r"""Allows to connect default_value input to the operator. If pin 2 is used, the IDs not found in the field are added with this default value. @@ -259,11 +270,13 @@ class OutputsRescopeFc(_Outputs): def __init__(self, op: Operator): super().__init__(rescope_fc._spec().outputs, op) - self._fields_container = Output(rescope_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + rescope_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/scoping/rescope_property_field.py b/src/ansys/dpf/core/operators/scoping/rescope_property_field.py index deaf6d2c0fa..5081a2896de 100644 --- a/src/ansys/dpf/core/operators/scoping/rescope_property_field.py +++ b/src/ansys/dpf/core/operators/scoping/rescope_property_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.property_field import PropertyField + from ansys.dpf.core.scoping import Scoping + class rescope_property_field(Operator): r"""Rescopes a property field on the given scoping. 
If an ID does not exist @@ -181,19 +186,21 @@ class InputsRescopePropertyField(_Inputs): def __init__(self, op: Operator): super().__init__(rescope_property_field._spec().inputs, op) - self._fields = Input(rescope_property_field._spec().input_pin(0), 0, op, -1) + self._fields: Input[PropertyField] = Input( + rescope_property_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( rescope_property_field._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._default_value = Input( + self._default_value: Input[int] = Input( rescope_property_field._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._default_value) @property - def fields(self) -> Input: + def fields(self) -> Input[PropertyField]: r"""Allows to connect fields input to the operator. Returns @@ -212,7 +219,7 @@ def fields(self) -> Input: return self._fields @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -231,7 +238,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def default_value(self) -> Input: + def default_value(self) -> Input[int]: r"""Allows to connect default_value input to the operator. If pin 2 is used, the IDs not found in the property field are added with this default value. diff --git a/src/ansys/dpf/core/operators/scoping/scoping_get_attribute.py b/src/ansys/dpf/core/operators/scoping/scoping_get_attribute.py index 3c622b97b46..054c38080e4 100644 --- a/src/ansys/dpf/core/operators/scoping/scoping_get_attribute.py +++ b/src/ansys/dpf/core/operators/scoping/scoping_get_attribute.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.scoping import Scoping + class scoping_get_attribute(Operator): r"""Uses the Scoping APIs to return a given attribute of the scoping in @@ -159,15 +163,17 @@ class InputsScopingGetAttribute(_Inputs): def __init__(self, op: Operator): super().__init__(scoping_get_attribute._spec().inputs, op) - self._scoping = Input(scoping_get_attribute._spec().input_pin(0), 0, op, -1) + self._scoping: Input[Scoping] = Input( + scoping_get_attribute._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._scoping) - self._property_name = Input( + self._property_name: Input[str] = Input( scoping_get_attribute._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._property_name) @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Returns @@ -186,7 +192,7 @@ def scoping(self) -> Input: return self._scoping @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. Supported property names are: "ids", "location". 
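A short sketch for scoping_get_attribute as typed above, assuming a running DPF server; Input[str] on property_name documents that one of the literal names quoted in the docstring ("ids" or "location") is expected.

    from ansys.dpf import core as dpf

    scoping = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)
    op = dpf.operators.scoping.scoping_get_attribute()
    op.inputs.scoping.connect(scoping)             # Input[Scoping]
    op.inputs.property_name.connect("location")    # Input[str]
    value = op.eval()                              # requested attribute of the scoping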
diff --git a/src/ansys/dpf/core/operators/scoping/split_on_property_type.py b/src/ansys/dpf/core/operators/scoping/split_on_property_type.py index aae7d128b1c..d711384ec18 100644 --- a/src/ansys/dpf/core/operators/scoping/split_on_property_type.py +++ b/src/ansys/dpf/core/operators/scoping/split_on_property_type.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class split_on_property_type(Operator): r"""Splits a given scoping or the mesh scoping (nodal or elemental) on given @@ -233,27 +239,33 @@ class InputsSplitOnPropertyType(_Inputs): def __init__(self, op: Operator): super().__init__(split_on_property_type._spec().inputs, op) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( split_on_property_type._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._mesh = Input(split_on_property_type._spec().input_pin(7), 7, op, -1) + self._mesh: Input[MeshedRegion] = Input( + split_on_property_type._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input( + self._requested_location: Input[str] = Input( split_on_property_type._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) - self._skin_case = Input( + self._skin_case: Input[int] = Input( split_on_property_type._spec().input_pin(12), 12, op, -1 ) self._inputs.append(self._skin_case) - self._label1 = Input(split_on_property_type._spec().input_pin(13), 13, op, 0) + self._label1: Input[str] = Input( + split_on_property_type._spec().input_pin(13), 13, op, 0 + ) self._inputs.append(self._label1) - self._label2 = Input(split_on_property_type._spec().input_pin(14), 14, op, 1) + self._label2: Input[str] = Input( + split_on_property_type._spec().input_pin(14), 14, op, 1 + ) self._inputs.append(self._label2) @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Scoping @@ -274,7 +286,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. mesh region @@ -295,7 +307,7 @@ def mesh(self) -> Input: return self._mesh @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. location (default is elemental) @@ -316,7 +328,7 @@ def requested_location(self) -> Input: return self._requested_location @property - def skin_case(self) -> Input: + def skin_case(self) -> Input[int]: r"""Allows to connect skin_case input to the operator. set to 0: to have skin elements in their own group, 1: merge skin and solid elements, 2: merge skin and shell elements (default) @@ -337,7 +349,7 @@ def skin_case(self) -> Input: return self._skin_case @property - def label1(self) -> Input: + def label1(self) -> Input[str]: r"""Allows to connect label1 input to the operator. 
properties to apply the filtering 'mat' and/or 'elshape' (since 2025R1 it supports any property name contained in the mesh property fields) (default is 'elshape') @@ -358,7 +370,7 @@ def label1(self) -> Input: return self._label1 @property - def label2(self) -> Input: + def label2(self) -> Input[str]: r"""Allows to connect label2 input to the operator. properties to apply the filtering 'mat' and/or 'elshape' (since 2025R1 it supports any property name contained in the mesh property fields) (default is 'elshape') @@ -393,11 +405,13 @@ class OutputsSplitOnPropertyType(_Outputs): def __init__(self, op: Operator): super().__init__(split_on_property_type._spec().outputs, op) - self._mesh_scoping = Output(split_on_property_type._spec().output_pin(0), 0, op) + self._mesh_scoping: Output[ScopingsContainer] = Output( + split_on_property_type._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh_scoping) @property - def mesh_scoping(self) -> Output: + def mesh_scoping(self) -> Output[ScopingsContainer]: r"""Allows to get mesh_scoping output of the operator Scoping diff --git a/src/ansys/dpf/core/operators/scoping/transpose.py b/src/ansys/dpf/core/operators/scoping/transpose.py index 50c8ef666eb..a4e3b381e94 100644 --- a/src/ansys/dpf/core/operators/scoping/transpose.py +++ b/src/ansys/dpf/core/operators/scoping/transpose.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class transpose(Operator): r"""Transposes the input scoping or scopings container (Elemental/Faces –> @@ -197,17 +204,23 @@ class InputsTranspose(_Inputs): def __init__(self, op: Operator): super().__init__(transpose._spec().inputs, op) - self._mesh_scoping = Input(transpose._spec().input_pin(0), 0, op, -1) + self._mesh_scoping: Input[Scoping | ScopingsContainer] = Input( + transpose._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._meshed_region = Input(transpose._spec().input_pin(1), 1, op, -1) + self._meshed_region: Input[MeshedRegion | MeshesContainer] = Input( + transpose._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._meshed_region) - self._inclusive = Input(transpose._spec().input_pin(2), 2, op, -1) + self._inclusive: Input[int] = Input(transpose._spec().input_pin(2), 2, op, -1) self._inputs.append(self._inclusive) - self._requested_location = Input(transpose._spec().input_pin(9), 9, op, -1) + self._requested_location: Input[str] = Input( + transpose._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect mesh_scoping input to the operator. Scoping or scopings container (the input type is the output type) @@ -228,7 +241,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def meshed_region(self) -> Input: + def meshed_region(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect meshed_region input to the operator. 
Returns @@ -247,7 +260,7 @@ def meshed_region(self) -> Input: return self._meshed_region @property - def inclusive(self) -> Input: + def inclusive(self) -> Input[int]: r"""Allows to connect inclusive input to the operator. if inclusive == 1 then all the elements/faces adjacent to the nodes/faces ids in input are added, if inclusive == 0, only the elements/faces which have all their nodes/faces in the scoping are included @@ -268,7 +281,7 @@ def inclusive(self) -> Input: return self._inclusive @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[str]: r"""Allows to connect requested_location input to the operator. Output scoping location for meshes with nodes, faces and elements. By default, elemental and faces scopings transpose to nodal, and nodal scopings transpose to elemental. diff --git a/src/ansys/dpf/core/operators/serialization/csv_to_field.py b/src/ansys/dpf/core/operators/serialization/csv_to_field.py index d56a947b7ed..4ce0b6c73c0 100644 --- a/src/ansys/dpf/core/operators/serialization/csv_to_field.py +++ b/src/ansys/dpf/core/operators/serialization/csv_to_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class csv_to_field(Operator): r"""transform csv file to a field or fields container @@ -155,13 +161,17 @@ class InputsCsvToField(_Inputs): def __init__(self, op: Operator): super().__init__(csv_to_field._spec().inputs, op) - self._time_scoping = Input(csv_to_field._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping] = Input( + csv_to_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._data_sources = Input(csv_to_field._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + csv_to_field._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. Returns @@ -180,7 +190,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
data sources containing a file with csv extension @@ -215,11 +225,13 @@ class OutputsCsvToField(_Outputs): def __init__(self, op: Operator): super().__init__(csv_to_field._spec().outputs, op) - self._fields_container = Output(csv_to_field._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + csv_to_field._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/serialization/data_tree_to_json.py b/src/ansys/dpf/core/operators/serialization/data_tree_to_json.py index 40a5dc8330d..ce0e8237024 100644 --- a/src/ansys/dpf/core/operators/serialization/data_tree_to_json.py +++ b/src/ansys/dpf/core/operators/serialization/data_tree_to_json.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_tree import DataTree + class data_tree_to_json(Operator): r"""Writes a json file or string from a DataTree @@ -155,13 +159,17 @@ class InputsDataTreeToJson(_Inputs): def __init__(self, op: Operator): super().__init__(data_tree_to_json._spec().inputs, op) - self._data_tree = Input(data_tree_to_json._spec().input_pin(0), 0, op, -1) + self._data_tree: Input[DataTree] = Input( + data_tree_to_json._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._data_tree) - self._path = Input(data_tree_to_json._spec().input_pin(1), 1, op, -1) + self._path: Input[str] = Input( + data_tree_to_json._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._path) @property - def data_tree(self) -> Input: + def data_tree(self) -> Input[DataTree]: r"""Allows to connect data_tree input to the operator. Returns @@ -180,7 +188,7 @@ def data_tree(self) -> Input: return self._data_tree @property - def path(self) -> Input: + def path(self) -> Input[str]: r"""Allows to connect path input to the operator. 
Returns diff --git a/src/ansys/dpf/core/operators/serialization/data_tree_to_txt.py b/src/ansys/dpf/core/operators/serialization/data_tree_to_txt.py index 067a16eba1c..07d7502efd8 100644 --- a/src/ansys/dpf/core/operators/serialization/data_tree_to_txt.py +++ b/src/ansys/dpf/core/operators/serialization/data_tree_to_txt.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_tree import DataTree + class data_tree_to_txt(Operator): r"""Writes a txt file or string from a DataTree @@ -155,13 +159,15 @@ class InputsDataTreeToTxt(_Inputs): def __init__(self, op: Operator): super().__init__(data_tree_to_txt._spec().inputs, op) - self._data_tree = Input(data_tree_to_txt._spec().input_pin(0), 0, op, -1) + self._data_tree: Input[DataTree] = Input( + data_tree_to_txt._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._data_tree) - self._path = Input(data_tree_to_txt._spec().input_pin(1), 1, op, -1) + self._path: Input[str] = Input(data_tree_to_txt._spec().input_pin(1), 1, op, -1) self._inputs.append(self._path) @property - def data_tree(self) -> Input: + def data_tree(self) -> Input[DataTree]: r"""Allows to connect data_tree input to the operator. Returns @@ -180,7 +186,7 @@ def data_tree(self) -> Input: return self._data_tree @property - def path(self) -> Input: + def path(self) -> Input[str]: r"""Allows to connect path input to the operator. Returns diff --git a/src/ansys/dpf/core/operators/serialization/deserializer.py b/src/ansys/dpf/core/operators/serialization/deserializer.py index 830a0f00e69..4d74b2cfbff 100644 --- a/src/ansys/dpf/core/operators/serialization/deserializer.py +++ b/src/ansys/dpf/core/operators/serialization/deserializer.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.any import Any + class deserializer(Operator): r"""Takes a file generated by the serializer and deserializes it into DPF’s @@ -168,13 +172,17 @@ class InputsDeserializer(_Inputs): def __init__(self, op: Operator): super().__init__(deserializer._spec().inputs, op) - self._stream_type = Input(deserializer._spec().input_pin(-1), -1, op, -1) + self._stream_type: Input[int] = Input( + deserializer._spec().input_pin(-1), -1, op, -1 + ) self._inputs.append(self._stream_type) - self._file_path = Input(deserializer._spec().input_pin(0), 0, op, -1) + self._file_path: Input[str] = Input( + deserializer._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._file_path) @property - def stream_type(self) -> Input: + def stream_type(self) -> Input[int]: r"""Allows to connect stream_type input to the operator. 0 for ASCII (default), and 1 for binary @@ -195,7 +203,7 @@ def stream_type(self) -> Input: return self._stream_type @property - def file_path(self) -> Input: + def file_path(self) -> Input[str]: r"""Allows to connect file_path input to the operator. 
file path @@ -231,13 +239,17 @@ class OutputsDeserializer(_Outputs): def __init__(self, op: Operator): super().__init__(deserializer._spec().outputs, op) - self._any_output1 = Output(deserializer._spec().output_pin(1), 1, op) + self._any_output1: Output[Any] = Output( + deserializer._spec().output_pin(1), 1, op + ) self._outputs.append(self._any_output1) - self._any_output2 = Output(deserializer._spec().output_pin(2), 2, op) + self._any_output2: Output[Any] = Output( + deserializer._spec().output_pin(2), 2, op + ) self._outputs.append(self._any_output2) @property - def any_output1(self) -> Output: + def any_output1(self) -> Output[Any]: r"""Allows to get any_output1 output of the operator number and types of outputs corresponding of the inputs used in the serialization @@ -257,7 +269,7 @@ def any_output1(self) -> Output: return self._any_output1 @property - def any_output2(self) -> Output: + def any_output2(self) -> Output[Any]: r"""Allows to get any_output2 output of the operator number and types of outputs corresponding of the inputs used in the serialization diff --git a/src/ansys/dpf/core/operators/serialization/export_symbolic_workflow.py b/src/ansys/dpf/core/operators/serialization/export_symbolic_workflow.py index b0469267282..81d528198b2 100644 --- a/src/ansys/dpf/core/operators/serialization/export_symbolic_workflow.py +++ b/src/ansys/dpf/core/operators/serialization/export_symbolic_workflow.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.workflow import Workflow + class export_symbolic_workflow(Operator): r"""Transforms a Workflow into a symbolic Workflow and writes it to a file @@ -195,17 +199,25 @@ class InputsExportSymbolicWorkflow(_Inputs): def __init__(self, op: Operator): super().__init__(export_symbolic_workflow._spec().inputs, op) - self._workflow = Input(export_symbolic_workflow._spec().input_pin(0), 0, op, -1) + self._workflow: Input[Workflow] = Input( + export_symbolic_workflow._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._workflow) - self._path = Input(export_symbolic_workflow._spec().input_pin(1), 1, op, -1) + self._path: Input[str] = Input( + export_symbolic_workflow._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._path) - self._format = Input(export_symbolic_workflow._spec().input_pin(2), 2, op, -1) + self._format: Input[int] = Input( + export_symbolic_workflow._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._format) - self._options = Input(export_symbolic_workflow._spec().input_pin(3), 3, op, -1) + self._options: Input[int] = Input( + export_symbolic_workflow._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._options) @property - def workflow(self) -> Input: + def workflow(self) -> Input[Workflow]: r"""Allows to connect workflow input to the operator. Returns @@ -224,7 +236,7 @@ def workflow(self) -> Input: return self._workflow @property - def path(self) -> Input: + def path(self) -> Input[str]: r"""Allows to connect path input to the operator. Returns @@ -243,7 +255,7 @@ def path(self) -> Input: return self._path @property - def format(self) -> Input: + def format(self) -> Input[int]: r"""Allows to connect format input to the operator. 0 is ASCII format and 1 is binary, default is 0. 
@@ -264,7 +276,7 @@ def format(self) -> Input: return self._format @property - def options(self) -> Input: + def options(self) -> Input[int]: r"""Allows to connect options input to the operator. 1 copies connections with its data, 2 forwards named inputs and outputs names, 7 copies connections of named inputs and ouputs with their data. default is 7. diff --git a/src/ansys/dpf/core/operators/serialization/field_to_csv.py b/src/ansys/dpf/core/operators/serialization/field_to_csv.py index b562c22d6c5..f0785416bb7 100644 --- a/src/ansys/dpf/core/operators/serialization/field_to_csv.py +++ b/src/ansys/dpf/core/operators/serialization/field_to_csv.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class field_to_csv(Operator): r"""Exports a field or a fields container into a csv file @@ -167,17 +172,21 @@ class InputsFieldToCsv(_Inputs): def __init__(self, op: Operator): super().__init__(field_to_csv._spec().inputs, op) - self._field_or_fields_container = Input( + self._field_or_fields_container: Input[FieldsContainer | Field] = Input( field_to_csv._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._field_or_fields_container) - self._file_path = Input(field_to_csv._spec().input_pin(1), 1, op, -1) + self._file_path: Input[str] = Input( + field_to_csv._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._file_path) - self._storage_type = Input(field_to_csv._spec().input_pin(2), 2, op, -1) + self._storage_type: Input[int] = Input( + field_to_csv._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._storage_type) @property - def field_or_fields_container(self) -> Input: + def field_or_fields_container(self) -> Input[FieldsContainer | Field]: r"""Allows to connect field_or_fields_container input to the operator. field_or_fields_container @@ -198,7 +207,7 @@ def field_or_fields_container(self) -> Input: return self._field_or_fields_container @property - def file_path(self) -> Input: + def file_path(self) -> Input[str]: r"""Allows to connect file_path input to the operator. Returns @@ -217,7 +226,7 @@ def file_path(self) -> Input: return self._file_path @property - def storage_type(self) -> Input: + def storage_type(self) -> Input[int]: r"""Allows to connect storage_type input to the operator. storage type : if matrices (without any particularity) are included in the fields container, the storage format can be chosen. 0 : flat/line format, 1 : ranked format. If 1 is chosen, the csv can not be read by "csv to field" operator anymore. Default : 0. 
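A last sketch for field_to_csv as typed above, assuming a running DPF server and the bundled example file; the output name "displacement.csv" is only illustrative and is resolved relative to the server's working directory.

    from ansys.dpf import core as dpf
    from ansys.dpf.core import examples

    model = dpf.Model(examples.find_static_rst())
    fc = model.results.displacement().eval()            # FieldsContainer

    op = dpf.operators.serialization.field_to_csv()
    op.inputs.field_or_fields_container.connect(fc)     # Input[FieldsContainer | Field]
    op.inputs.file_path.connect("displacement.csv")     # Input[str]
    op.inputs.storage_type.connect(0)                   # Input[int], 0 = flat/line format
    op.run()                                            # export only; no output pin is read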
diff --git a/src/ansys/dpf/core/operators/serialization/hdf5dpf_custom_read.py b/src/ansys/dpf/core/operators/serialization/hdf5dpf_custom_read.py index 592cf835f18..5eaec5742f1 100644 --- a/src/ansys/dpf/core/operators/serialization/hdf5dpf_custom_read.py +++ b/src/ansys/dpf/core/operators/serialization/hdf5dpf_custom_read.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.data_tree import DataTree + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class hdf5dpf_custom_read(Operator): r"""Extract a custom result from an hdf5dpf file. This operator is @@ -227,21 +234,33 @@ class InputsHdf5DpfCustomRead(_Inputs): def __init__(self, op: Operator): super().__init__(hdf5dpf_custom_read._spec().inputs, op) - self._time_scoping = Input(hdf5dpf_custom_read._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping] = Input( + hdf5dpf_custom_read._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(hdf5dpf_custom_read._spec().input_pin(1), 1, op, -1) + self._mesh_scoping: Input[Scoping] = Input( + hdf5dpf_custom_read._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._streams = Input(hdf5dpf_custom_read._spec().input_pin(3), 3, op, -1) + self._streams: Input[StreamsContainer] = Input( + hdf5dpf_custom_read._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams) - self._data_sources = Input(hdf5dpf_custom_read._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + hdf5dpf_custom_read._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._meta_data = Input(hdf5dpf_custom_read._spec().input_pin(24), 24, op, -1) + self._meta_data: Input[DataTree] = Input( + hdf5dpf_custom_read._spec().input_pin(24), 24, op, -1 + ) self._inputs.append(self._meta_data) - self._result_name = Input(hdf5dpf_custom_read._spec().input_pin(60), 60, op, -1) + self._result_name: Input = Input( + hdf5dpf_custom_read._spec().input_pin(60), 60, op, -1 + ) self._inputs.append(self._result_name) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. Returns @@ -260,7 +279,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -279,7 +298,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. Hdf5df file stream. @@ -300,7 +319,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Hdf5df file data source. @@ -321,7 +340,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def meta_data(self) -> Input: + def meta_data(self) -> Input[DataTree]: r"""Allows to connect meta_data input to the operator. 
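For hdf5dpf_custom_read, note that pin 60 (result_name) is left as a bare Input in this change while the other pins gain types. A hedged sketch with placeholder file and result names:

from ansys.dpf import core as dpf

op = dpf.operators.serialization.hdf5dpf_custom_read()
op.inputs.data_sources.connect(dpf.DataSources("results.h5dpf"))  # Input[DataSources], placeholder file
op.inputs.result_name.connect("displacement")                     # bare Input: pin 60 stays untyped here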
meta_data that may be used to evaluate results or extract workflows. diff --git a/src/ansys/dpf/core/operators/serialization/hdf5dpf_generate_result_file.py b/src/ansys/dpf/core/operators/serialization/hdf5dpf_generate_result_file.py index 0ab46fdb8fc..883bf53a6dd 100644 --- a/src/ansys/dpf/core/operators/serialization/hdf5dpf_generate_result_file.py +++ b/src/ansys/dpf/core/operators/serialization/hdf5dpf_generate_result_file.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.any import Any + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.data_tree import DataTree + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.result_info import ResultInfo + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class hdf5dpf_generate_result_file(Operator): r"""Generate a dpf result file from provided information. @@ -313,53 +322,53 @@ class InputsHdf5DpfGenerateResultFile(_Inputs): def __init__(self, op: Operator): super().__init__(hdf5dpf_generate_result_file._spec().inputs, op) - self._h5_chunk_size = Input( + self._h5_chunk_size: Input[int] = Input( hdf5dpf_generate_result_file._spec().input_pin(-7), -7, op, -1 ) self._inputs.append(self._h5_chunk_size) - self._append_mode = Input( + self._append_mode: Input[bool] = Input( hdf5dpf_generate_result_file._spec().input_pin(-6), -6, op, -1 ) self._inputs.append(self._append_mode) - self._dataset_size_compression_threshold = Input( + self._dataset_size_compression_threshold: Input[int] = Input( hdf5dpf_generate_result_file._spec().input_pin(-5), -5, op, -1 ) self._inputs.append(self._dataset_size_compression_threshold) - self._h5_native_compression = Input( + self._h5_native_compression: Input[int | DataTree] = Input( hdf5dpf_generate_result_file._spec().input_pin(-2), -2, op, -1 ) self._inputs.append(self._h5_native_compression) - self._export_floats = Input( + self._export_floats: Input[bool] = Input( hdf5dpf_generate_result_file._spec().input_pin(-1), -1, op, -1 ) self._inputs.append(self._export_floats) - self._filename = Input( + self._filename: Input[str] = Input( hdf5dpf_generate_result_file._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._filename) - self._mesh_provider_out = Input( + self._mesh_provider_out: Input[MeshedRegion] = Input( hdf5dpf_generate_result_file._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_provider_out) - self._time_freq_support_out = Input( + self._time_freq_support_out: Input[TimeFreqSupport] = Input( hdf5dpf_generate_result_file._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._time_freq_support_out) - self._ansys_unit_system_id = Input( + self._ansys_unit_system_id: Input[int | ResultInfo] = Input( hdf5dpf_generate_result_file._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._ansys_unit_system_id) - self._input_name1 = Input( + self._input_name1: Input[str | Any] = Input( hdf5dpf_generate_result_file._spec().input_pin(4), 4, op, 0 ) self._inputs.append(self._input_name1) - self._input_name2 = Input( + self._input_name2: Input[str | Any] = Input( hdf5dpf_generate_result_file._spec().input_pin(5), 5, op, 1 ) self._inputs.append(self._input_name2) @property - def h5_chunk_size(self) -> Input: + def h5_chunk_size(self) -> Input[int]: r"""Allows to 
connect h5_chunk_size input to the operator. Size of each HDF5 chunk in kilobytes (KB). Default: 1 MB when compression is enabled; for uncompressed datasets, the default is the full dataset size x dimension. @@ -380,7 +389,7 @@ def h5_chunk_size(self) -> Input: return self._h5_chunk_size @property - def append_mode(self) -> Input: + def append_mode(self) -> Input[bool]: r"""Allows to connect append_mode input to the operator. Experimental: Allow appending chunked data to the file. This disables fields container content deduplication. @@ -401,7 +410,7 @@ def append_mode(self) -> Input: return self._append_mode @property - def dataset_size_compression_threshold(self) -> Input: + def dataset_size_compression_threshold(self) -> Input[int]: r"""Allows to connect dataset_size_compression_threshold input to the operator. Integer value that defines the minimum dataset size (in bytes) to use h5 native compression Applicable for arrays of floats, doubles and integers. @@ -422,7 +431,7 @@ def dataset_size_compression_threshold(self) -> Input: return self._dataset_size_compression_threshold @property - def h5_native_compression(self) -> Input: + def h5_native_compression(self) -> Input[int | DataTree]: r"""Allows to connect h5_native_compression input to the operator. Integer value / DataTree that defines the h5 native compression used For Integer Input {0: No Compression (default); 1-9: GZIP Compression : 9 provides maximum compression but at the slowest speed.}For DataTree Input {type: None / GZIP / ZSTD; level: GZIP (1-9) / ZSTD (1-20); num_threads: ZSTD (>0)} @@ -443,7 +452,7 @@ def h5_native_compression(self) -> Input: return self._h5_native_compression @property - def export_floats(self) -> Input: + def export_floats(self) -> Input[bool]: r"""Allows to connect export_floats input to the operator. converts double to float to reduce file size (default is true) @@ -464,7 +473,7 @@ def export_floats(self) -> Input: return self._export_floats @property - def filename(self) -> Input: + def filename(self) -> Input[str]: r"""Allows to connect filename input to the operator. name of the output file that will be generated (utf8). @@ -485,7 +494,7 @@ def filename(self) -> Input: return self._filename @property - def mesh_provider_out(self) -> Input: + def mesh_provider_out(self) -> Input[MeshedRegion]: r"""Allows to connect mesh_provider_out input to the operator. defines the MeshedRegion that is exported and provided by MeshProvider. @@ -506,7 +515,7 @@ def mesh_provider_out(self) -> Input: return self._mesh_provider_out @property - def time_freq_support_out(self) -> Input: + def time_freq_support_out(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support_out input to the operator. defines the TimeFreqSupport that is exported and provided by TimeFreqSupportProvider. @@ -527,7 +536,7 @@ def time_freq_support_out(self) -> Input: return self._time_freq_support_out @property - def ansys_unit_system_id(self) -> Input: + def ansys_unit_system_id(self) -> Input[int | ResultInfo]: r"""Allows to connect ansys_unit_system_id input to the operator. defines the unit system the results are exported with. A Result info can be input to also export Physics Type and Analysis Type. @@ -548,7 +557,7 @@ def ansys_unit_system_id(self) -> Input: return self._ansys_unit_system_id @property - def input_name1(self) -> Input: + def input_name1(self) -> Input[str | Any]: r"""Allows to connect input_name1 input to the operator. Set of even and odd pins to serialize results. Odd pins (4, 6, 8...) 
are strings, and they represent the names of the results to be serialized. Even pins (5, 7, 9...) are DPF types, and they represent the results to be serialized. They should go in pairs (for each result name, there should be a result) and connected sequentially. @@ -569,7 +578,7 @@ def input_name1(self) -> Input: return self._input_name1 @property - def input_name2(self) -> Input: + def input_name2(self) -> Input[str | Any]: r"""Allows to connect input_name2 input to the operator. Set of even and odd pins to serialize results. Odd pins (4, 6, 8...) are strings, and they represent the names of the results to be serialized. Even pins (5, 7, 9...) are DPF types, and they represent the results to be serialized. They should go in pairs (for each result name, there should be a result) and connected sequentially. @@ -604,13 +613,13 @@ class OutputsHdf5DpfGenerateResultFile(_Outputs): def __init__(self, op: Operator): super().__init__(hdf5dpf_generate_result_file._spec().outputs, op) - self._data_sources = Output( + self._data_sources: Output[DataSources] = Output( hdf5dpf_generate_result_file._spec().output_pin(0), 0, op ) self._outputs.append(self._data_sources) @property - def data_sources(self) -> Output: + def data_sources(self) -> Output[DataSources]: r"""Allows to get data_sources output of the operator data_sources filled with the H5 generated file path. diff --git a/src/ansys/dpf/core/operators/serialization/import_symbolic_workflow.py b/src/ansys/dpf/core/operators/serialization/import_symbolic_workflow.py index c5dd6657abc..4df35c81860 100644 --- a/src/ansys/dpf/core/operators/serialization/import_symbolic_workflow.py +++ b/src/ansys/dpf/core/operators/serialization/import_symbolic_workflow.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.workflow import Workflow + class import_symbolic_workflow(Operator): r"""Reads a file or string holding a Symbolic Workflow and instantiate a @@ -157,15 +162,17 @@ class InputsImportSymbolicWorkflow(_Inputs): def __init__(self, op: Operator): super().__init__(import_symbolic_workflow._spec().inputs, op) - self._string_or_path = Input( + self._string_or_path: Input[str | DataSources] = Input( import_symbolic_workflow._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._string_or_path) - self._format = Input(import_symbolic_workflow._spec().input_pin(2), 2, op, -1) + self._format: Input[int] = Input( + import_symbolic_workflow._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._format) @property - def string_or_path(self) -> Input: + def string_or_path(self) -> Input[str | DataSources]: r"""Allows to connect string_or_path input to the operator. Returns @@ -184,7 +191,7 @@ def string_or_path(self) -> Input: return self._string_or_path @property - def format(self) -> Input: + def format(self) -> Input[int]: r"""Allows to connect format input to the operator. -1 is auto-detection, 0 is ASCII format, 1 is binary, 2 is json, default is -1 (auto-detection). 
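The string_or_path pin of import_symbolic_workflow accepts two kinds of values, which the union annotation now makes explicit. A sketch with a placeholder file:

from ansys.dpf import core as dpf

op = dpf.operators.serialization.import_symbolic_workflow()
op.inputs.string_or_path.connect(dpf.DataSources("workflow_file.txt"))  # Input[str | DataSources]
op.inputs.format.connect(-1)   # Input[int], -1 = auto-detection
wf = op.outputs.workflow()     # Output[Workflow]; evaluates the operator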
@@ -219,11 +226,13 @@ class OutputsImportSymbolicWorkflow(_Outputs): def __init__(self, op: Operator): super().__init__(import_symbolic_workflow._spec().outputs, op) - self._workflow = Output(import_symbolic_workflow._spec().output_pin(0), 0, op) + self._workflow: Output[Workflow] = Output( + import_symbolic_workflow._spec().output_pin(0), 0, op + ) self._outputs.append(self._workflow) @property - def workflow(self) -> Output: + def workflow(self) -> Output[Workflow]: r"""Allows to get workflow output of the operator Returns diff --git a/src/ansys/dpf/core/operators/serialization/json_to_data_tree.py b/src/ansys/dpf/core/operators/serialization/json_to_data_tree.py index f61f36f84d3..34207ea92a1 100644 --- a/src/ansys/dpf/core/operators/serialization/json_to_data_tree.py +++ b/src/ansys/dpf/core/operators/serialization/json_to_data_tree.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.data_tree import DataTree + class json_to_data_tree(Operator): r"""Reads a json file or string to a DataTree @@ -140,11 +145,13 @@ class InputsJsonToDataTree(_Inputs): def __init__(self, op: Operator): super().__init__(json_to_data_tree._spec().inputs, op) - self._string_or_path = Input(json_to_data_tree._spec().input_pin(0), 0, op, -1) + self._string_or_path: Input[str | DataSources] = Input( + json_to_data_tree._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._string_or_path) @property - def string_or_path(self) -> Input: + def string_or_path(self) -> Input[str | DataSources]: r"""Allows to connect string_or_path input to the operator. 
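json_to_data_tree shares the same str | DataSources pattern; here is a sketch passing a literal JSON string instead of a file:

from ansys.dpf import core as dpf

op = dpf.operators.serialization.json_to_data_tree()
op.inputs.string_or_path.connect('{"step": 1}')  # Input[str | DataSources], here a JSON string
tree = op.outputs.data_tree()                     # Output[DataTree]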
Returns @@ -177,11 +184,13 @@ class OutputsJsonToDataTree(_Outputs): def __init__(self, op: Operator): super().__init__(json_to_data_tree._spec().outputs, op) - self._data_tree = Output(json_to_data_tree._spec().output_pin(0), 0, op) + self._data_tree: Output[DataTree] = Output( + json_to_data_tree._spec().output_pin(0), 0, op + ) self._outputs.append(self._data_tree) @property - def data_tree(self) -> Output: + def data_tree(self) -> Output[DataTree]: r"""Allows to get data_tree output of the operator Returns diff --git a/src/ansys/dpf/core/operators/serialization/migrate_file_to_vtk.py b/src/ansys/dpf/core/operators/serialization/migrate_file_to_vtk.py index a1065dabbfb..71940550844 100644 --- a/src/ansys/dpf/core/operators/serialization/migrate_file_to_vtk.py +++ b/src/ansys/dpf/core/operators/serialization/migrate_file_to_vtk.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + class migrate_file_to_vtk(Operator): r"""Take an input data sources or streams and convert as much data as @@ -178,19 +183,21 @@ class InputsMigrateFileToVtk(_Inputs): def __init__(self, op: Operator): super().__init__(migrate_file_to_vtk._spec().inputs, op) - self._output_filename = Input( + self._output_filename: Input[str] = Input( migrate_file_to_vtk._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._output_filename) - self._streams_container = Input( + self._streams_container: Input[StreamsContainer] = Input( migrate_file_to_vtk._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_container) - self._data_sources = Input(migrate_file_to_vtk._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + migrate_file_to_vtk._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def output_filename(self) -> Input: + def output_filename(self) -> Input[str]: r"""Allows to connect output_filename input to the operator. Returns @@ -209,7 +216,7 @@ def output_filename(self) -> Input: return self._output_filename @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. Returns @@ -228,7 +235,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
Returns @@ -261,11 +268,13 @@ class OutputsMigrateFileToVtk(_Outputs): def __init__(self, op: Operator): super().__init__(migrate_file_to_vtk._spec().outputs, op) - self._data_sources = Output(migrate_file_to_vtk._spec().output_pin(0), 0, op) + self._data_sources: Output[DataSources] = Output( + migrate_file_to_vtk._spec().output_pin(0), 0, op + ) self._outputs.append(self._data_sources) @property - def data_sources(self) -> Output: + def data_sources(self) -> Output[DataSources]: r"""Allows to get data_sources output of the operator Generated output vtk file diff --git a/src/ansys/dpf/core/operators/serialization/migrate_to_vtu.py b/src/ansys/dpf/core/operators/serialization/migrate_to_vtu.py index c34817c3219..4b6d304339f 100644 --- a/src/ansys/dpf/core/operators/serialization/migrate_to_vtu.py +++ b/src/ansys/dpf/core/operators/serialization/migrate_to_vtu.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + class migrate_to_vtu(Operator): r"""Extract all results from a datasources and exports them into vtu format. @@ -263,25 +269,41 @@ class InputsMigrateToVtu(_Inputs): def __init__(self, op: Operator): super().__init__(migrate_to_vtu._spec().inputs, op) - self._time_scoping = Input(migrate_to_vtu._spec().input_pin(0), 0, op, -1) + self._time_scoping: Input[Scoping] = Input( + migrate_to_vtu._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._streams_container = Input(migrate_to_vtu._spec().input_pin(3), 3, op, -1) + self._streams_container: Input[StreamsContainer] = Input( + migrate_to_vtu._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(migrate_to_vtu._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + migrate_to_vtu._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._directory = Input(migrate_to_vtu._spec().input_pin(20), 20, op, -1) + self._directory: Input[str] = Input( + migrate_to_vtu._spec().input_pin(20), 20, op, -1 + ) self._inputs.append(self._directory) - self._base_name = Input(migrate_to_vtu._spec().input_pin(21), 21, op, -1) + self._base_name: Input[str] = Input( + migrate_to_vtu._spec().input_pin(21), 21, op, -1 + ) self._inputs.append(self._base_name) - self._result1 = Input(migrate_to_vtu._spec().input_pin(30), 30, op, 0) + self._result1: Input[str] = Input( + migrate_to_vtu._spec().input_pin(30), 30, op, 0 + ) self._inputs.append(self._result1) - self._result2 = Input(migrate_to_vtu._spec().input_pin(31), 31, op, 1) + self._result2: Input[str] = Input( + migrate_to_vtu._spec().input_pin(31), 31, op, 1 + ) self._inputs.append(self._result2) - self._write_mode = Input(migrate_to_vtu._spec().input_pin(100), 100, op, -1) + self._write_mode: Input[str] = Input( + migrate_to_vtu._spec().input_pin(100), 100, op, -1 + ) self._inputs.append(self._write_mode) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. 
time sets to export, default is all @@ -302,7 +324,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def streams_container(self) -> Input: + def streams_container(self) -> Input[StreamsContainer]: r"""Allows to connect streams_container input to the operator. result file container allowed to be kept open to cache data @@ -323,7 +345,7 @@ def streams_container(self) -> Input: return self._streams_container @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. result file path container, used if no streams are set @@ -344,7 +366,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def directory(self) -> Input: + def directory(self) -> Input[str]: r"""Allows to connect directory input to the operator. directory path @@ -365,7 +387,7 @@ def directory(self) -> Input: return self._directory @property - def base_name(self) -> Input: + def base_name(self) -> Input[str]: r"""Allows to connect base_name input to the operator. vtu base file name, (default is file) @@ -386,7 +408,7 @@ def base_name(self) -> Input: return self._base_name @property - def result1(self) -> Input: + def result1(self) -> Input[str]: r"""Allows to connect result1 input to the operator. if Operator's names are connected to this Pin, only these results are exported (else all available results are exported) @@ -407,7 +429,7 @@ def result1(self) -> Input: return self._result1 @property - def result2(self) -> Input: + def result2(self) -> Input[str]: r"""Allows to connect result2 input to the operator. if Operator's names are connected to this Pin, only these results are exported (else all available results are exported) @@ -428,7 +450,7 @@ def result2(self) -> Input: return self._result2 @property - def write_mode(self) -> Input: + def write_mode(self) -> Input[str]: r"""Allows to connect write_mode input to the operator. 
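The ellipsis pins result1/result2 of migrate_to_vtu are now typed Input[str]. A sketch with placeholder file, directory, and result operator names:

from ansys.dpf import core as dpf

op = dpf.operators.serialization.migrate_to_vtu()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # Input[DataSources], placeholder file
op.inputs.directory.connect("vtu_output")                     # Input[str], placeholder directory
op.inputs.result1.connect("U")  # Input[str], placeholder result operator name
op.inputs.result2.connect("S")  # Input[str], placeholder result operator name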
Available are rawbinarycompressed, rawbinary, base64appended, base64inline, ascii, default is (rawbinarycompressed) @@ -463,11 +485,13 @@ class OutputsMigrateToVtu(_Outputs): def __init__(self, op: Operator): super().__init__(migrate_to_vtu._spec().outputs, op) - self._path = Output(migrate_to_vtu._spec().output_pin(0), 0, op) + self._path: Output[DataSources] = Output( + migrate_to_vtu._spec().output_pin(0), 0, op + ) self._outputs.append(self._path) @property - def path(self) -> Output: + def path(self) -> Output[DataSources]: r"""Allows to get path output of the operator list of output vtu file path diff --git a/src/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py b/src/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py index 1e48fdf9f08..2720d26e159 100644 --- a/src/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py +++ b/src/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py @@ -202,21 +202,25 @@ class InputsSerializeToHdf5(_Inputs): def __init__(self, op: Operator): super().__init__(serialize_to_hdf5._spec().inputs, op) - self._file_path = Input(serialize_to_hdf5._spec().input_pin(0), 0, op, -1) + self._file_path: Input[str] = Input( + serialize_to_hdf5._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._file_path) - self._export_floats = Input(serialize_to_hdf5._spec().input_pin(1), 1, op, -1) + self._export_floats: Input[bool] = Input( + serialize_to_hdf5._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._export_floats) - self._export_flat_vectors = Input( + self._export_flat_vectors: Input[bool] = Input( serialize_to_hdf5._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._export_flat_vectors) - self._data1 = Input(serialize_to_hdf5._spec().input_pin(3), 3, op, 0) + self._data1: Input = Input(serialize_to_hdf5._spec().input_pin(3), 3, op, 0) self._inputs.append(self._data1) - self._data2 = Input(serialize_to_hdf5._spec().input_pin(4), 4, op, 1) + self._data2: Input = Input(serialize_to_hdf5._spec().input_pin(4), 4, op, 1) self._inputs.append(self._data2) @property - def file_path(self) -> Input: + def file_path(self) -> Input[str]: r"""Allows to connect file_path input to the operator. output file path with .h5 extension @@ -237,7 +241,7 @@ def file_path(self) -> Input: return self._file_path @property - def export_floats(self) -> Input: + def export_floats(self) -> Input[bool]: r"""Allows to connect export_floats input to the operator. converts double to float to reduce file size (default is true) @@ -258,7 +262,7 @@ def export_floats(self) -> Input: return self._export_floats @property - def export_flat_vectors(self) -> Input: + def export_flat_vectors(self) -> Input[bool]: r"""Allows to connect export_flat_vectors input to the operator. if true, vectors and matrices data are exported flat (x1,y1,z1,x2,y2,z2..) 
(default is false) diff --git a/src/ansys/dpf/core/operators/serialization/serializer.py b/src/ansys/dpf/core/operators/serialization/serializer.py index 8dfde3c0c28..44524bf8c6b 100644 --- a/src/ansys/dpf/core/operators/serialization/serializer.py +++ b/src/ansys/dpf/core/operators/serialization/serializer.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.any import Any + class serializer(Operator): r"""Take any input and serialize them in a file. @@ -193,17 +197,19 @@ class InputsSerializer(_Inputs): def __init__(self, op: Operator): super().__init__(serializer._spec().inputs, op) - self._stream_type = Input(serializer._spec().input_pin(-1), -1, op, -1) + self._stream_type: Input[int] = Input( + serializer._spec().input_pin(-1), -1, op, -1 + ) self._inputs.append(self._stream_type) - self._file_path = Input(serializer._spec().input_pin(0), 0, op, -1) + self._file_path: Input[str] = Input(serializer._spec().input_pin(0), 0, op, -1) self._inputs.append(self._file_path) - self._any_input1 = Input(serializer._spec().input_pin(1), 1, op, 0) + self._any_input1: Input[Any] = Input(serializer._spec().input_pin(1), 1, op, 0) self._inputs.append(self._any_input1) - self._any_input2 = Input(serializer._spec().input_pin(2), 2, op, 1) + self._any_input2: Input[Any] = Input(serializer._spec().input_pin(2), 2, op, 1) self._inputs.append(self._any_input2) @property - def stream_type(self) -> Input: + def stream_type(self) -> Input[int]: r"""Allows to connect stream_type input to the operator. 0 for ASCII (default), and 1 for binary @@ -224,7 +230,7 @@ def stream_type(self) -> Input: return self._stream_type @property - def file_path(self) -> Input: + def file_path(self) -> Input[str]: r"""Allows to connect file_path input to the operator. Returns @@ -243,7 +249,7 @@ def file_path(self) -> Input: return self._file_path @property - def any_input1(self) -> Input: + def any_input1(self) -> Input[Any]: r"""Allows to connect any_input1 input to the operator. any input @@ -264,7 +270,7 @@ def any_input1(self) -> Input: return self._any_input1 @property - def any_input2(self) -> Input: + def any_input2(self) -> Input[Any]: r"""Allows to connect any_input2 input to the operator. 
any input @@ -299,11 +305,11 @@ class OutputsSerializer(_Outputs): def __init__(self, op: Operator): super().__init__(serializer._spec().outputs, op) - self._file_path = Output(serializer._spec().output_pin(0), 0, op) + self._file_path: Output[str] = Output(serializer._spec().output_pin(0), 0, op) self._outputs.append(self._file_path) @property - def file_path(self) -> Output: + def file_path(self) -> Output[str]: r"""Allows to get file_path output of the operator Returns diff --git a/src/ansys/dpf/core/operators/serialization/serializer_to_string.py b/src/ansys/dpf/core/operators/serialization/serializer_to_string.py index 0312c740f17..0d7c00edef4 100644 --- a/src/ansys/dpf/core/operators/serialization/serializer_to_string.py +++ b/src/ansys/dpf/core/operators/serialization/serializer_to_string.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.any import Any + class serializer_to_string(Operator): r"""Take any input and serialize them in a string. @@ -195,17 +199,21 @@ class InputsSerializerToString(_Inputs): def __init__(self, op: Operator): super().__init__(serializer_to_string._spec().inputs, op) - self._stream_type = Input( + self._stream_type: Input[int] = Input( serializer_to_string._spec().input_pin(-1), -1, op, -1 ) self._inputs.append(self._stream_type) - self._any_input1 = Input(serializer_to_string._spec().input_pin(1), 1, op, 0) + self._any_input1: Input[Any] = Input( + serializer_to_string._spec().input_pin(1), 1, op, 0 + ) self._inputs.append(self._any_input1) - self._any_input2 = Input(serializer_to_string._spec().input_pin(2), 2, op, 1) + self._any_input2: Input[Any] = Input( + serializer_to_string._spec().input_pin(2), 2, op, 1 + ) self._inputs.append(self._any_input2) @property - def stream_type(self) -> Input: + def stream_type(self) -> Input[int]: r"""Allows to connect stream_type input to the operator. 0 for string (default), 1 for binary, 2 for binary with chunked output (the output string will be returned in several chunks to prevent string memory overflows). @@ -226,7 +234,7 @@ def stream_type(self) -> Input: return self._stream_type @property - def any_input1(self) -> Input: + def any_input1(self) -> Input[Any]: r"""Allows to connect any_input1 input to the operator. any input @@ -247,7 +255,7 @@ def any_input1(self) -> Input: return self._any_input1 @property - def any_input2(self) -> Input: + def any_input2(self) -> Input[Any]: r"""Allows to connect any_input2 input to the operator. 
any input @@ -284,19 +292,21 @@ class OutputsSerializerToString(_Outputs): def __init__(self, op: Operator): super().__init__(serializer_to_string._spec().outputs, op) - self._nof_chunks = Output(serializer_to_string._spec().output_pin(-1), -1, op) + self._nof_chunks: Output[int] = Output( + serializer_to_string._spec().output_pin(-1), -1, op + ) self._outputs.append(self._nof_chunks) - self._serialized_string1 = Output( + self._serialized_string1: Output[str] = Output( serializer_to_string._spec().output_pin(0), 0, op ) self._outputs.append(self._serialized_string1) - self._serialized_string2 = Output( + self._serialized_string2: Output[str] = Output( serializer_to_string._spec().output_pin(1), 1, op ) self._outputs.append(self._serialized_string2) @property - def nof_chunks(self) -> Output: + def nof_chunks(self) -> Output[int]: r"""Allows to get nof_chunks output of the operator Number of chunks when mode passed to input pin(-1) = 2. @@ -316,7 +326,7 @@ def nof_chunks(self) -> Output: return self._nof_chunks @property - def serialized_string1(self) -> Output: + def serialized_string1(self) -> Output[str]: r"""Allows to get serialized_string1 output of the operator Returns @@ -334,7 +344,7 @@ def serialized_string1(self) -> Output: return self._serialized_string1 @property - def serialized_string2(self) -> Output: + def serialized_string2(self) -> Output[str]: r"""Allows to get serialized_string2 output of the operator Returns diff --git a/src/ansys/dpf/core/operators/serialization/string_deserializer.py b/src/ansys/dpf/core/operators/serialization/string_deserializer.py index 3653b0ac0cd..e9ec65b66db 100644 --- a/src/ansys/dpf/core/operators/serialization/string_deserializer.py +++ b/src/ansys/dpf/core/operators/serialization/string_deserializer.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.any import Any + class string_deserializer(Operator): r"""Takes a string generated by the serializer and deserializes it into @@ -188,19 +192,21 @@ class InputsStringDeserializer(_Inputs): def __init__(self, op: Operator): super().__init__(string_deserializer._spec().inputs, op) - self._stream_type = Input(string_deserializer._spec().input_pin(-1), -1, op, -1) + self._stream_type: Input[int] = Input( + string_deserializer._spec().input_pin(-1), -1, op, -1 + ) self._inputs.append(self._stream_type) - self._serialized_string1 = Input( + self._serialized_string1: Input[str] = Input( string_deserializer._spec().input_pin(0), 0, op, 0 ) self._inputs.append(self._serialized_string1) - self._serialized_string2 = Input( + self._serialized_string2: Input[str] = Input( string_deserializer._spec().input_pin(1), 1, op, 1 ) self._inputs.append(self._serialized_string2) @property - def stream_type(self) -> Input: + def stream_type(self) -> Input[int]: r"""Allows to connect stream_type input to the operator. 0 for string (default), and 1 for binary @@ -221,7 +227,7 @@ def stream_type(self) -> Input: return self._stream_type @property - def serialized_string1(self) -> Input: + def serialized_string1(self) -> Input[str]: r"""Allows to connect serialized_string1 input to the operator. 
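The Any used for the any_input/any_output pins is DPF's ansys.dpf.core.any.Any wrapper, not typing.Any. A round-trip sketch between serializer_to_string and string_deserializer, using an empty placeholder field:

from ansys.dpf import core as dpf

ser = dpf.operators.serialization.serializer_to_string()
ser.inputs.stream_type.connect(0)            # Input[int], 0 = string mode
ser.inputs.any_input1.connect(dpf.Field())   # Input[Any] (DPF Any wrapper)
text = ser.outputs.serialized_string1()      # Output[str]; evaluates the operator

deser = dpf.operators.serialization.string_deserializer()
deser.inputs.stream_type.connect(0)            # Input[int]
deser.inputs.serialized_string1.connect(text)  # Input[str]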
Returns @@ -240,7 +246,7 @@ def serialized_string1(self) -> Input: return self._serialized_string1 @property - def serialized_string2(self) -> Input: + def serialized_string2(self) -> Input[str]: r"""Allows to connect serialized_string2 input to the operator. Returns @@ -274,13 +280,17 @@ class OutputsStringDeserializer(_Outputs): def __init__(self, op: Operator): super().__init__(string_deserializer._spec().outputs, op) - self._any_output1 = Output(string_deserializer._spec().output_pin(1), 1, op) + self._any_output1: Output[Any] = Output( + string_deserializer._spec().output_pin(1), 1, op + ) self._outputs.append(self._any_output1) - self._any_output2 = Output(string_deserializer._spec().output_pin(2), 2, op) + self._any_output2: Output[Any] = Output( + string_deserializer._spec().output_pin(2), 2, op + ) self._outputs.append(self._any_output2) @property - def any_output1(self) -> Output: + def any_output1(self) -> Output[Any]: r"""Allows to get any_output1 output of the operator number and types of outputs corresponding of the inputs used in the serialization @@ -300,7 +310,7 @@ def any_output1(self) -> Output: return self._any_output1 @property - def any_output2(self) -> Output: + def any_output2(self) -> Output[Any]: r"""Allows to get any_output2 output of the operator number and types of outputs corresponding of the inputs used in the serialization diff --git a/src/ansys/dpf/core/operators/serialization/txt_to_data_tree.py b/src/ansys/dpf/core/operators/serialization/txt_to_data_tree.py index 46c21bdea87..9af072b74d2 100644 --- a/src/ansys/dpf/core/operators/serialization/txt_to_data_tree.py +++ b/src/ansys/dpf/core/operators/serialization/txt_to_data_tree.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.data_tree import DataTree + class txt_to_data_tree(Operator): r"""Reads a txt file or string to a DataTree @@ -140,11 +145,13 @@ class InputsTxtToDataTree(_Inputs): def __init__(self, op: Operator): super().__init__(txt_to_data_tree._spec().inputs, op) - self._string_or_path = Input(txt_to_data_tree._spec().input_pin(0), 0, op, -1) + self._string_or_path: Input[str | DataSources] = Input( + txt_to_data_tree._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._string_or_path) @property - def string_or_path(self) -> Input: + def string_or_path(self) -> Input[str | DataSources]: r"""Allows to connect string_or_path input to the operator. 
Returns @@ -177,11 +184,13 @@ class OutputsTxtToDataTree(_Outputs): def __init__(self, op: Operator): super().__init__(txt_to_data_tree._spec().outputs, op) - self._data_tree = Output(txt_to_data_tree._spec().output_pin(0), 0, op) + self._data_tree: Output[DataTree] = Output( + txt_to_data_tree._spec().output_pin(0), 0, op + ) self._outputs.append(self._data_tree) @property - def data_tree(self) -> Output: + def data_tree(self) -> Output[DataTree]: r"""Allows to get data_tree output of the operator Returns diff --git a/src/ansys/dpf/core/operators/serialization/vtk_export.py b/src/ansys/dpf/core/operators/serialization/vtk_export.py index 2f79980a14a..fa9008e0f14 100644 --- a/src/ansys/dpf/core/operators/serialization/vtk_export.py +++ b/src/ansys/dpf/core/operators/serialization/vtk_export.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class vtk_export(Operator): r"""Write the input field and fields container into a given vtk path @@ -200,19 +206,27 @@ class InputsVtkExport(_Inputs): def __init__(self, op: Operator): super().__init__(vtk_export._spec().inputs, op) - self._export_type = Input(vtk_export._spec().input_pin(-1), -1, op, -1) + self._export_type: Input[int] = Input( + vtk_export._spec().input_pin(-1), -1, op, -1 + ) self._inputs.append(self._export_type) - self._file_path = Input(vtk_export._spec().input_pin(0), 0, op, -1) + self._file_path: Input[str] = Input(vtk_export._spec().input_pin(0), 0, op, -1) self._inputs.append(self._file_path) - self._mesh = Input(vtk_export._spec().input_pin(1), 1, op, -1) + self._mesh: Input[MeshedRegion] = Input( + vtk_export._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh) - self._fields1 = Input(vtk_export._spec().input_pin(2), 2, op, 0) + self._fields1: Input[FieldsContainer | Field] = Input( + vtk_export._spec().input_pin(2), 2, op, 0 + ) self._inputs.append(self._fields1) - self._fields2 = Input(vtk_export._spec().input_pin(3), 3, op, 1) + self._fields2: Input[FieldsContainer | Field] = Input( + vtk_export._spec().input_pin(3), 3, op, 1 + ) self._inputs.append(self._fields2) @property - def export_type(self) -> Input: + def export_type(self) -> Input[int]: r"""Allows to connect export_type input to the operator. Either export volume elements in the mesh with their fields with value 0 or faces elements in the mesh with their fields with value 1 (default is 0) @@ -233,7 +247,7 @@ def export_type(self) -> Input: return self._export_type @property - def file_path(self) -> Input: + def file_path(self) -> Input[str]: r"""Allows to connect file_path input to the operator. path with vtk extension were the export occurs @@ -254,7 +268,7 @@ def file_path(self) -> Input: return self._file_path @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. necessary if the first field or fields container don't have a mesh in their support @@ -275,7 +289,7 @@ def mesh(self) -> Input: return self._mesh @property - def fields1(self) -> Input: + def fields1(self) -> Input[FieldsContainer | Field]: r"""Allows to connect fields1 input to the operator. 
fields exported @@ -296,7 +310,7 @@ def fields1(self) -> Input: return self._fields1 @property - def fields2(self) -> Input: + def fields2(self) -> Input[FieldsContainer | Field]: r"""Allows to connect fields2 input to the operator. fields exported diff --git a/src/ansys/dpf/core/operators/serialization/vtk_to_fields.py b/src/ansys/dpf/core/operators/serialization/vtk_to_fields.py index 401bb3acdb7..dba7315651d 100644 --- a/src/ansys/dpf/core/operators/serialization/vtk_to_fields.py +++ b/src/ansys/dpf/core/operators/serialization/vtk_to_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.streams_container import StreamsContainer + class vtk_to_fields(Operator): r"""Write a field based on a vtk file. @@ -172,15 +178,21 @@ class InputsVtkToFields(_Inputs): def __init__(self, op: Operator): super().__init__(vtk_to_fields._spec().inputs, op) - self._field_name = Input(vtk_to_fields._spec().input_pin(0), 0, op, -1) + self._field_name: Input[str] = Input( + vtk_to_fields._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field_name) - self._streams = Input(vtk_to_fields._spec().input_pin(3), 3, op, -1) + self._streams: Input[StreamsContainer] = Input( + vtk_to_fields._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams) - self._data_sources = Input(vtk_to_fields._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + vtk_to_fields._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property - def field_name(self) -> Input: + def field_name(self) -> Input[str]: r"""Allows to connect field_name input to the operator. name of the field in the vtk file @@ -201,7 +213,7 @@ def field_name(self) -> Input: return self._field_name @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. Returns @@ -220,7 +232,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. 
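A sketch of the vtk_to_fields pins typed above, with placeholder field name and file path:

from ansys.dpf import core as dpf

op = dpf.operators.serialization.vtk_to_fields()
op.inputs.field_name.connect("displacement")                 # Input[str], placeholder name
op.inputs.data_sources.connect(dpf.DataSources("file.vtk"))  # Input[DataSources], placeholder path
fc = op.outputs.fields_container()                           # Output[FieldsContainer]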
Returns @@ -253,11 +265,13 @@ class OutputsVtkToFields(_Outputs): def __init__(self, op: Operator): super().__init__(vtk_to_fields._spec().outputs, op) - self._fields_container = Output(vtk_to_fields._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + vtk_to_fields._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator fields_container diff --git a/src/ansys/dpf/core/operators/serialization/vtu_export.py b/src/ansys/dpf/core/operators/serialization/vtu_export.py index 784d8ee4829..d96ed1661c7 100644 --- a/src/ansys/dpf/core/operators/serialization/vtu_export.py +++ b/src/ansys/dpf/core/operators/serialization/vtu_export.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,14 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.property_field import PropertyField + from ansys.dpf.core.string_field import StringField + class vtu_export(Operator): r"""Export DPF data into vtu format. @@ -275,27 +284,41 @@ class InputsVtuExport(_Inputs): def __init__(self, op: Operator): super().__init__(vtu_export._spec().inputs, op) - self._directory = Input(vtu_export._spec().input_pin(0), 0, op, -1) + self._directory: Input[str] = Input(vtu_export._spec().input_pin(0), 0, op, -1) self._inputs.append(self._directory) - self._base_name = Input(vtu_export._spec().input_pin(1), 1, op, -1) + self._base_name: Input[str] = Input(vtu_export._spec().input_pin(1), 1, op, -1) self._inputs.append(self._base_name) - self._mesh = Input(vtu_export._spec().input_pin(2), 2, op, -1) + self._mesh: Input[MeshedRegion] = Input( + vtu_export._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._mesh) - self._fields1 = Input(vtu_export._spec().input_pin(3), 3, op, 0) + self._fields1: Input[Field | FieldsContainer | PropertyField] = Input( + vtu_export._spec().input_pin(3), 3, op, 0 + ) self._inputs.append(self._fields1) - self._fields2 = Input(vtu_export._spec().input_pin(4), 4, op, 1) + self._fields2: Input[Field | FieldsContainer | PropertyField] = Input( + vtu_export._spec().input_pin(4), 4, op, 1 + ) self._inputs.append(self._fields2) - self._write_mode = Input(vtu_export._spec().input_pin(100), 100, op, -1) + self._write_mode: Input[str] = Input( + vtu_export._spec().input_pin(100), 100, op, -1 + ) self._inputs.append(self._write_mode) - self._as_point_cloud = Input(vtu_export._spec().input_pin(101), 101, op, -1) + self._as_point_cloud: Input[bool] = Input( + vtu_export._spec().input_pin(101), 101, op, -1 + ) self._inputs.append(self._as_point_cloud) - self._export_faces = Input(vtu_export._spec().input_pin(102), 102, op, -1) + self._export_faces: Input[bool] = Input( + vtu_export._spec().input_pin(102), 102, op, -1 + ) self._inputs.append(self._export_faces) - self._mesh_properties = Input(vtu_export._spec().input_pin(103), 103, op, -1) + self._mesh_properties: Input[StringField] = Input( + vtu_export._spec().input_pin(103), 103, op, -1 + ) 
self._inputs.append(self._mesh_properties) @property - def directory(self) -> Input: + def directory(self) -> Input[str]: r"""Allows to connect directory input to the operator. directory path @@ -316,7 +339,7 @@ def directory(self) -> Input: return self._directory @property - def base_name(self) -> Input: + def base_name(self) -> Input[str]: r"""Allows to connect base_name input to the operator. vtu base file name, (default is file) @@ -337,7 +360,7 @@ def base_name(self) -> Input: return self._base_name @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion]: r"""Allows to connect mesh input to the operator. mesh @@ -358,7 +381,7 @@ def mesh(self) -> Input: return self._mesh @property - def fields1(self) -> Input: + def fields1(self) -> Input[Field | FieldsContainer | PropertyField]: r"""Allows to connect fields1 input to the operator. Nodal, Face, or Elemental field, fields container (over time), or property field to export. When there is no support available in the exported mesh, that data is ignored. @@ -379,7 +402,7 @@ def fields1(self) -> Input: return self._fields1 @property - def fields2(self) -> Input: + def fields2(self) -> Input[Field | FieldsContainer | PropertyField]: r"""Allows to connect fields2 input to the operator. Nodal, Face, or Elemental field, fields container (over time), or property field to export. When there is no support available in the exported mesh, that data is ignored. @@ -400,7 +423,7 @@ def fields2(self) -> Input: return self._fields2 @property - def write_mode(self) -> Input: + def write_mode(self) -> Input[str]: r"""Allows to connect write_mode input to the operator. Available are rawbinarycompressed, rawbinary, base64appended, base64inline, ascii, default is (rawbinarycompressed) @@ -421,7 +444,7 @@ def write_mode(self) -> Input: return self._write_mode @property - def as_point_cloud(self) -> Input: + def as_point_cloud(self) -> Input[bool]: r"""Allows to connect as_point_cloud input to the operator. Whether to export the mesh as a point cloud. Default is False. @@ -442,7 +465,7 @@ def as_point_cloud(self) -> Input: return self._as_point_cloud @property - def export_faces(self) -> Input: + def export_faces(self) -> Input[bool]: r"""Allows to connect export_faces input to the operator. Whether to also export faces as shell elements when the mesh contains cells. Default is False. @@ -463,7 +486,7 @@ def export_faces(self) -> Input: return self._export_faces @property - def mesh_properties(self) -> Input: + def mesh_properties(self) -> Input[StringField]: r"""Allows to connect mesh_properties input to the operator. List of names of mesh properties to export. 
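For vtu_export, the string and boolean option pins are now typed. A sketch with placeholder values:

from ansys.dpf import core as dpf

op = dpf.operators.serialization.vtu_export()
op.inputs.directory.connect("vtu_output")   # Input[str], placeholder directory
op.inputs.write_mode.connect("ascii")       # Input[str], one of the listed write modes
op.inputs.as_point_cloud.connect(False)     # Input[bool]
op.inputs.export_faces.connect(True)        # Input[bool]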
@@ -498,11 +521,13 @@ class OutputsVtuExport(_Outputs): def __init__(self, op: Operator): super().__init__(vtu_export._spec().outputs, op) - self._path = Output(vtu_export._spec().output_pin(0), 0, op) + self._path: Output[DataSources] = Output( + vtu_export._spec().output_pin(0), 0, op + ) self._outputs.append(self._path) @property - def path(self) -> Output: + def path(self) -> Output[DataSources]: r"""Allows to get path output of the operator list of output vtu file path diff --git a/src/ansys/dpf/core/operators/serialization/workflow_to_pydpf.py b/src/ansys/dpf/core/operators/serialization/workflow_to_pydpf.py index ed2b4526a17..ada6dcab4e7 100644 --- a/src/ansys/dpf/core/operators/serialization/workflow_to_pydpf.py +++ b/src/ansys/dpf/core/operators/serialization/workflow_to_pydpf.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.workflow import Workflow + class workflow_to_pydpf(Operator): r"""Generates a PyDPF script that can recreate the given workflow. The @@ -158,13 +162,17 @@ class InputsWorkflowToPydpf(_Inputs): def __init__(self, op: Operator): super().__init__(workflow_to_pydpf._spec().inputs, op) - self._workflow = Input(workflow_to_pydpf._spec().input_pin(0), 0, op, -1) + self._workflow: Input[Workflow] = Input( + workflow_to_pydpf._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._workflow) - self._output_path = Input(workflow_to_pydpf._spec().input_pin(1), 1, op, -1) + self._output_path: Input[str] = Input( + workflow_to_pydpf._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._output_path) @property - def workflow(self) -> Input: + def workflow(self) -> Input[Workflow]: r"""Allows to connect workflow input to the operator. Returns @@ -183,7 +191,7 @@ def workflow(self) -> Input: return self._workflow @property - def output_path(self) -> Input: + def output_path(self) -> Input[str]: r"""Allows to connect output_path input to the operator. 
Returns @@ -216,11 +224,13 @@ class OutputsWorkflowToPydpf(_Outputs): def __init__(self, op: Operator): super().__init__(workflow_to_pydpf._spec().outputs, op) - self._pydpf_code = Output(workflow_to_pydpf._spec().output_pin(0), 0, op) + self._pydpf_code: Output[str] = Output( + workflow_to_pydpf._spec().output_pin(0), 0, op + ) self._outputs.append(self._pydpf_code) @property - def pydpf_code(self) -> Output: + def pydpf_code(self) -> Output[str]: r"""Allows to get pydpf_code output of the operator Returns diff --git a/src/ansys/dpf/core/operators/serialization/workflow_to_workflow_topology.py b/src/ansys/dpf/core/operators/serialization/workflow_to_workflow_topology.py index 1ad79e43b8c..f6872da4f0c 100644 --- a/src/ansys/dpf/core/operators/serialization/workflow_to_workflow_topology.py +++ b/src/ansys/dpf/core/operators/serialization/workflow_to_workflow_topology.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.generic_data_container import GenericDataContainer + from ansys.dpf.core.workflow import Workflow + class workflow_to_workflow_topology(Operator): r"""Creates a GenericDataContainer based on WorkflowTopology structure from @@ -149,13 +154,13 @@ class InputsWorkflowToWorkflowTopology(_Inputs): def __init__(self, op: Operator): super().__init__(workflow_to_workflow_topology._spec().inputs, op) - self._workflow = Input( + self._workflow: Input[Workflow] = Input( workflow_to_workflow_topology._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._workflow) @property - def workflow(self) -> Input: + def workflow(self) -> Input[Workflow]: r"""Allows to connect workflow input to the operator. 
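workflow_to_pydpf now exposes a typed string output; a sketch assuming an existing workflow object:

from ansys.dpf import core as dpf

op = dpf.operators.serialization.workflow_to_pydpf()
op.inputs.workflow.connect(dpf.Workflow())  # Input[Workflow]
script = op.outputs.pydpf_code()            # Output[str], the generated PyDPF script text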
Returns @@ -188,13 +193,13 @@ class OutputsWorkflowToWorkflowTopology(_Outputs): def __init__(self, op: Operator): super().__init__(workflow_to_workflow_topology._spec().outputs, op) - self._workflow_topology = Output( + self._workflow_topology: Output[GenericDataContainer] = Output( workflow_to_workflow_topology._spec().output_pin(0), 0, op ) self._outputs.append(self._workflow_topology) @property - def workflow_topology(self) -> Output: + def workflow_topology(self) -> Output[GenericDataContainer]: r"""Allows to get workflow_topology output of the operator Returns diff --git a/src/ansys/dpf/core/operators/server/grpc_shutdown_server.py b/src/ansys/dpf/core/operators/server/grpc_shutdown_server.py index e57427bd4a1..8f023fcb8a6 100644 --- a/src/ansys/dpf/core/operators/server/grpc_shutdown_server.py +++ b/src/ansys/dpf/core/operators/server/grpc_shutdown_server.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.streams_container import StreamsContainer + class grpc_shutdown_server(Operator): r"""Shutdowns dpf’s grpc server @@ -131,11 +135,13 @@ class InputsGrpcShutdownServer(_Inputs): def __init__(self, op: Operator): super().__init__(grpc_shutdown_server._spec().inputs, op) - self._grpc_stream = Input(grpc_shutdown_server._spec().input_pin(0), 0, op, -1) + self._grpc_stream: Input[StreamsContainer] = Input( + grpc_shutdown_server._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._grpc_stream) @property - def grpc_stream(self) -> Input: + def grpc_stream(self) -> Input[StreamsContainer]: r"""Allows to connect grpc_stream input to the operator. 
dpf streams handling the server diff --git a/src/ansys/dpf/core/operators/server/grpc_start_server.py b/src/ansys/dpf/core/operators/server/grpc_start_server.py index 6bb21ccc08f..105174f3a6d 100644 --- a/src/ansys/dpf/core/operators/server/grpc_start_server.py +++ b/src/ansys/dpf/core/operators/server/grpc_start_server.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + class grpc_start_server(Operator): r"""Starts a dpf’s grpc server (if local) or connect to one and keep it @@ -229,23 +234,31 @@ class InputsGrpcStartServer(_Inputs): def __init__(self, op: Operator): super().__init__(grpc_start_server._spec().inputs, op) - self._ip = Input(grpc_start_server._spec().input_pin(0), 0, op, -1) + self._ip: Input[str] = Input(grpc_start_server._spec().input_pin(0), 0, op, -1) self._inputs.append(self._ip) - self._port = Input(grpc_start_server._spec().input_pin(1), 1, op, -1) + self._port: Input[str | int] = Input( + grpc_start_server._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._port) - self._starting_option = Input(grpc_start_server._spec().input_pin(2), 2, op, -1) + self._starting_option: Input[int] = Input( + grpc_start_server._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._starting_option) - self._should_start_server = Input( + self._should_start_server: Input[bool] = Input( grpc_start_server._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._should_start_server) - self._data_sources = Input(grpc_start_server._spec().input_pin(4), 4, op, -1) + self._data_sources: Input[DataSources] = Input( + grpc_start_server._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._dpf_context = Input(grpc_start_server._spec().input_pin(5), 5, op, -1) + self._dpf_context: Input[str | int] = Input( + grpc_start_server._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._dpf_context) @property - def ip(self) -> Input: + def ip(self) -> Input[str]: r"""Allows to connect ip input to the operator. If no ip address is put, the local ip address is taken @@ -266,7 +279,7 @@ def ip(self) -> Input: return self._ip @property - def port(self) -> Input: + def port(self) -> Input[str | int]: r"""Allows to connect port input to the operator. If no port is put, port 50052 is taken @@ -287,7 +300,7 @@ def port(self) -> Input: return self._port @property - def starting_option(self) -> Input: + def starting_option(self) -> Input[int]: r"""Allows to connect starting_option input to the operator. default is 1 that starts server in new thread. With 0, this thread will be waiting for grpc calls and will not be usable for anything else. With 2, it the server will be started in a new process. @@ -308,7 +321,7 @@ def starting_option(self) -> Input: return self._starting_option @property - def should_start_server(self) -> Input: + def should_start_server(self) -> Input[bool]: r"""Allows to connect should_start_server input to the operator. If true, the server is assumed to be local and is started. 
If false, only a client (able to send grpc calls) will be started @@ -329,7 +342,7 @@ def should_start_server(self) -> Input: return self._should_start_server @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. A data source with result key 'grpc' and file path 'port:ip' can be used instead of the input port and IP. @@ -350,7 +363,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def dpf_context(self) -> Input: + def dpf_context(self) -> Input[str | int]: r"""Allows to connect dpf_context input to the operator. This pin is associated with pin(2) = 2 (server started in a new process). User can enter the integer associated with a DPF context (1: Standalone Context - DpfCoreStandalone.xml, 3: Custom - DpfCustomDefined.xml) or a string with the path of the XML specifying the context. @@ -385,11 +398,13 @@ class OutputsGrpcStartServer(_Outputs): def __init__(self, op: Operator): super().__init__(grpc_start_server._spec().outputs, op) - self._grpc_streams = Output(grpc_start_server._spec().output_pin(0), 0, op) + self._grpc_streams: Output[StreamsContainer] = Output( + grpc_start_server._spec().output_pin(0), 0, op + ) self._outputs.append(self._grpc_streams) @property - def grpc_streams(self) -> Output: + def grpc_streams(self) -> Output[StreamsContainer]: r"""Allows to get grpc_streams output of the operator dpf streams handling the server, if the server is started in this thread, then nothing is added in output diff --git a/src/ansys/dpf/core/operators/utility/assemble_scalars_to_matrices.py b/src/ansys/dpf/core/operators/utility/assemble_scalars_to_matrices.py index 25604e66ca1..9df197991a4 100644 --- a/src/ansys/dpf/core/operators/utility/assemble_scalars_to_matrices.py +++ b/src/ansys/dpf/core/operators/utility/assemble_scalars_to_matrices.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class assemble_scalars_to_matrices(Operator): r"""Take nine scalar fields and assemble them as a 3x3 matrix field. 
If the @@ -288,31 +292,49 @@ class InputsAssembleScalarsToMatrices(_Inputs): def __init__(self, op: Operator): super().__init__(assemble_scalars_to_matrices._spec().inputs, op) - self._xx = Input(assemble_scalars_to_matrices._spec().input_pin(0), 0, op, -1) + self._xx: Input[Field] = Input( + assemble_scalars_to_matrices._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._xx) - self._yy = Input(assemble_scalars_to_matrices._spec().input_pin(1), 1, op, -1) + self._yy: Input[Field] = Input( + assemble_scalars_to_matrices._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._yy) - self._zz = Input(assemble_scalars_to_matrices._spec().input_pin(2), 2, op, -1) + self._zz: Input[Field] = Input( + assemble_scalars_to_matrices._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._zz) - self._xy = Input(assemble_scalars_to_matrices._spec().input_pin(3), 3, op, -1) + self._xy: Input[Field] = Input( + assemble_scalars_to_matrices._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._xy) - self._yz = Input(assemble_scalars_to_matrices._spec().input_pin(4), 4, op, -1) + self._yz: Input[Field] = Input( + assemble_scalars_to_matrices._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._yz) - self._xz = Input(assemble_scalars_to_matrices._spec().input_pin(5), 5, op, -1) + self._xz: Input[Field] = Input( + assemble_scalars_to_matrices._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._xz) - self._yx = Input(assemble_scalars_to_matrices._spec().input_pin(6), 6, op, -1) + self._yx: Input[Field] = Input( + assemble_scalars_to_matrices._spec().input_pin(6), 6, op, -1 + ) self._inputs.append(self._yx) - self._zy = Input(assemble_scalars_to_matrices._spec().input_pin(7), 7, op, -1) + self._zy: Input[Field] = Input( + assemble_scalars_to_matrices._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._zy) - self._zx = Input(assemble_scalars_to_matrices._spec().input_pin(8), 8, op, -1) + self._zx: Input[Field] = Input( + assemble_scalars_to_matrices._spec().input_pin(8), 8, op, -1 + ) self._inputs.append(self._zx) - self._symmetrical = Input( + self._symmetrical: Input[bool] = Input( assemble_scalars_to_matrices._spec().input_pin(60), 60, op, -1 ) self._inputs.append(self._symmetrical) @property - def xx(self) -> Input: + def xx(self) -> Input[Field]: r"""Allows to connect xx input to the operator. Returns @@ -331,7 +353,7 @@ def xx(self) -> Input: return self._xx @property - def yy(self) -> Input: + def yy(self) -> Input[Field]: r"""Allows to connect yy input to the operator. Returns @@ -350,7 +372,7 @@ def yy(self) -> Input: return self._yy @property - def zz(self) -> Input: + def zz(self) -> Input[Field]: r"""Allows to connect zz input to the operator. Returns @@ -369,7 +391,7 @@ def zz(self) -> Input: return self._zz @property - def xy(self) -> Input: + def xy(self) -> Input[Field]: r"""Allows to connect xy input to the operator. Returns @@ -388,7 +410,7 @@ def xy(self) -> Input: return self._xy @property - def yz(self) -> Input: + def yz(self) -> Input[Field]: r"""Allows to connect yz input to the operator. Returns @@ -407,7 +429,7 @@ def yz(self) -> Input: return self._yz @property - def xz(self) -> Input: + def xz(self) -> Input[Field]: r"""Allows to connect xz input to the operator. Returns @@ -426,7 +448,7 @@ def xz(self) -> Input: return self._xz @property - def yx(self) -> Input: + def yx(self) -> Input[Field]: r"""Allows to connect yx input to the operator. 
Returns @@ -445,7 +467,7 @@ def yx(self) -> Input: return self._yx @property - def zy(self) -> Input: + def zy(self) -> Input[Field]: r"""Allows to connect zy input to the operator. Returns @@ -464,7 +486,7 @@ def zy(self) -> Input: return self._zy @property - def zx(self) -> Input: + def zx(self) -> Input[Field]: r"""Allows to connect zx input to the operator. Returns @@ -483,7 +505,7 @@ def zx(self) -> Input: return self._zx @property - def symmetrical(self) -> Input: + def symmetrical(self) -> Input[bool]: r"""Allows to connect symmetrical input to the operator. Returns @@ -516,11 +538,13 @@ class OutputsAssembleScalarsToMatrices(_Outputs): def __init__(self, op: Operator): super().__init__(assemble_scalars_to_matrices._spec().outputs, op) - self._field = Output(assemble_scalars_to_matrices._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + assemble_scalars_to_matrices._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/assemble_scalars_to_matrices_fc.py b/src/ansys/dpf/core/operators/utility/assemble_scalars_to_matrices_fc.py index c4750d0e1e4..f7cc335efe9 100644 --- a/src/ansys/dpf/core/operators/utility/assemble_scalars_to_matrices_fc.py +++ b/src/ansys/dpf/core/operators/utility/assemble_scalars_to_matrices_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class assemble_scalars_to_matrices_fc(Operator): r"""Take nine scalar fields container and assemble them as a 3x3 matrix @@ -288,49 +292,49 @@ class InputsAssembleScalarsToMatricesFc(_Inputs): def __init__(self, op: Operator): super().__init__(assemble_scalars_to_matrices_fc._spec().inputs, op) - self._xx = Input( + self._xx: Input[FieldsContainer] = Input( assemble_scalars_to_matrices_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._xx) - self._yy = Input( + self._yy: Input[FieldsContainer] = Input( assemble_scalars_to_matrices_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._yy) - self._zz = Input( + self._zz: Input[FieldsContainer] = Input( assemble_scalars_to_matrices_fc._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._zz) - self._xy = Input( + self._xy: Input[FieldsContainer] = Input( assemble_scalars_to_matrices_fc._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._xy) - self._yz = Input( + self._yz: Input[FieldsContainer] = Input( assemble_scalars_to_matrices_fc._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._yz) - self._xz = Input( + self._xz: Input[FieldsContainer] = Input( assemble_scalars_to_matrices_fc._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._xz) - self._yx = Input( + self._yx: Input[FieldsContainer] = Input( assemble_scalars_to_matrices_fc._spec().input_pin(6), 6, op, -1 ) self._inputs.append(self._yx) - self._zy = Input( + self._zy: Input[FieldsContainer] = Input( assemble_scalars_to_matrices_fc._spec().input_pin(7), 7, op, -1 ) self._inputs.append(self._zy) - self._zx = Input( + self._zx: Input[FieldsContainer] = Input( assemble_scalars_to_matrices_fc._spec().input_pin(8), 8, op, -1 ) self._inputs.append(self._zx) - 
self._symmetrical = Input( + self._symmetrical: Input[bool] = Input( assemble_scalars_to_matrices_fc._spec().input_pin(60), 60, op, -1 ) self._inputs.append(self._symmetrical) @property - def xx(self) -> Input: + def xx(self) -> Input[FieldsContainer]: r"""Allows to connect xx input to the operator. Returns @@ -349,7 +353,7 @@ def xx(self) -> Input: return self._xx @property - def yy(self) -> Input: + def yy(self) -> Input[FieldsContainer]: r"""Allows to connect yy input to the operator. Returns @@ -368,7 +372,7 @@ def yy(self) -> Input: return self._yy @property - def zz(self) -> Input: + def zz(self) -> Input[FieldsContainer]: r"""Allows to connect zz input to the operator. Returns @@ -387,7 +391,7 @@ def zz(self) -> Input: return self._zz @property - def xy(self) -> Input: + def xy(self) -> Input[FieldsContainer]: r"""Allows to connect xy input to the operator. Returns @@ -406,7 +410,7 @@ def xy(self) -> Input: return self._xy @property - def yz(self) -> Input: + def yz(self) -> Input[FieldsContainer]: r"""Allows to connect yz input to the operator. Returns @@ -425,7 +429,7 @@ def yz(self) -> Input: return self._yz @property - def xz(self) -> Input: + def xz(self) -> Input[FieldsContainer]: r"""Allows to connect xz input to the operator. Returns @@ -444,7 +448,7 @@ def xz(self) -> Input: return self._xz @property - def yx(self) -> Input: + def yx(self) -> Input[FieldsContainer]: r"""Allows to connect yx input to the operator. Returns @@ -463,7 +467,7 @@ def yx(self) -> Input: return self._yx @property - def zy(self) -> Input: + def zy(self) -> Input[FieldsContainer]: r"""Allows to connect zy input to the operator. Returns @@ -482,7 +486,7 @@ def zy(self) -> Input: return self._zy @property - def zx(self) -> Input: + def zx(self) -> Input[FieldsContainer]: r"""Allows to connect zx input to the operator. Returns @@ -501,7 +505,7 @@ def zx(self) -> Input: return self._zx @property - def symmetrical(self) -> Input: + def symmetrical(self) -> Input[bool]: r"""Allows to connect symmetrical input to the operator. Returns @@ -534,13 +538,13 @@ class OutputsAssembleScalarsToMatricesFc(_Outputs): def __init__(self, op: Operator): super().__init__(assemble_scalars_to_matrices_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( assemble_scalars_to_matrices_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/assemble_scalars_to_vectors.py b/src/ansys/dpf/core/operators/utility/assemble_scalars_to_vectors.py index 02212bc9ac1..4747ffe98bc 100644 --- a/src/ansys/dpf/core/operators/utility/assemble_scalars_to_vectors.py +++ b/src/ansys/dpf/core/operators/utility/assemble_scalars_to_vectors.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class assemble_scalars_to_vectors(Operator): r"""Takes three scalar fields and assembles them as a 3D vector field. 
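# Illustrative usage sketch for the typed pins above (not part of the generated
# change). Assumes a running DPF server; `xx_field`, `yy_field`, `zz_field`
# below are hypothetical, pre-existing dpf.Field objects.
from ansys.dpf.core import operators as ops

op = ops.utility.assemble_scalars_to_matrices()
op.inputs.xx.connect(xx_field)       # Input[Field]
op.inputs.yy.connect(yy_field)
op.inputs.zz.connect(zz_field)
# ...remaining components (xy, yz, xz, yx, zy, zx) are connected the same way.
op.inputs.symmetrical.connect(True)  # Input[bool]: a non-bool should now be flagged by type checkers
matrix_field = op.outputs.field()    # Output[Field]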
@@ -172,15 +176,21 @@ class InputsAssembleScalarsToVectors(_Inputs): def __init__(self, op: Operator): super().__init__(assemble_scalars_to_vectors._spec().inputs, op) - self._x = Input(assemble_scalars_to_vectors._spec().input_pin(0), 0, op, -1) + self._x: Input[Field] = Input( + assemble_scalars_to_vectors._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._x) - self._y = Input(assemble_scalars_to_vectors._spec().input_pin(1), 1, op, -1) + self._y: Input[Field] = Input( + assemble_scalars_to_vectors._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._y) - self._z = Input(assemble_scalars_to_vectors._spec().input_pin(2), 2, op, -1) + self._z: Input[Field] = Input( + assemble_scalars_to_vectors._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._z) @property - def x(self) -> Input: + def x(self) -> Input[Field]: r"""Allows to connect x input to the operator. Returns @@ -199,7 +209,7 @@ def x(self) -> Input: return self._x @property - def y(self) -> Input: + def y(self) -> Input[Field]: r"""Allows to connect y input to the operator. Returns @@ -218,7 +228,7 @@ def y(self) -> Input: return self._y @property - def z(self) -> Input: + def z(self) -> Input[Field]: r"""Allows to connect z input to the operator. Returns @@ -251,11 +261,13 @@ class OutputsAssembleScalarsToVectors(_Outputs): def __init__(self, op: Operator): super().__init__(assemble_scalars_to_vectors._spec().outputs, op) - self._field = Output(assemble_scalars_to_vectors._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + assemble_scalars_to_vectors._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/assemble_scalars_to_vectors_fc.py b/src/ansys/dpf/core/operators/utility/assemble_scalars_to_vectors_fc.py index 45a49e0dbc8..4ea023891db 100644 --- a/src/ansys/dpf/core/operators/utility/assemble_scalars_to_vectors_fc.py +++ b/src/ansys/dpf/core/operators/utility/assemble_scalars_to_vectors_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class assemble_scalars_to_vectors_fc(Operator): r"""Takes three scalar fields container and assembles them as a 3D vector @@ -174,15 +178,21 @@ class InputsAssembleScalarsToVectorsFc(_Inputs): def __init__(self, op: Operator): super().__init__(assemble_scalars_to_vectors_fc._spec().inputs, op) - self._x = Input(assemble_scalars_to_vectors_fc._spec().input_pin(0), 0, op, -1) + self._x: Input[FieldsContainer] = Input( + assemble_scalars_to_vectors_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._x) - self._y = Input(assemble_scalars_to_vectors_fc._spec().input_pin(1), 1, op, -1) + self._y: Input[FieldsContainer] = Input( + assemble_scalars_to_vectors_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._y) - self._z = Input(assemble_scalars_to_vectors_fc._spec().input_pin(2), 2, op, -1) + self._z: Input[FieldsContainer] = Input( + assemble_scalars_to_vectors_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._z) @property - def x(self) -> Input: + def x(self) -> Input[FieldsContainer]: r"""Allows to connect x 
input to the operator. Returns @@ -201,7 +211,7 @@ def x(self) -> Input: return self._x @property - def y(self) -> Input: + def y(self) -> Input[FieldsContainer]: r"""Allows to connect y input to the operator. Returns @@ -220,7 +230,7 @@ def y(self) -> Input: return self._y @property - def z(self) -> Input: + def z(self) -> Input[FieldsContainer]: r"""Allows to connect z input to the operator. Returns @@ -253,13 +263,13 @@ class OutputsAssembleScalarsToVectorsFc(_Outputs): def __init__(self, op: Operator): super().__init__(assemble_scalars_to_vectors_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( assemble_scalars_to_vectors_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/bind_support.py b/src/ansys/dpf/core/operators/utility/bind_support.py index 00c67f87cbd..e69ccca5dd0 100644 --- a/src/ansys/dpf/core/operators/utility/bind_support.py +++ b/src/ansys/dpf/core/operators/utility/bind_support.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + class bind_support(Operator): r"""Ties a support to a field. @@ -156,13 +162,17 @@ class InputsBindSupport(_Inputs): def __init__(self, op: Operator): super().__init__(bind_support._spec().inputs, op) - self._field = Input(bind_support._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + bind_support._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._support = Input(bind_support._spec().input_pin(1), 1, op, -1) + self._support: Input[MeshedRegion] = Input( + bind_support._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._support) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -183,7 +193,7 @@ def field(self) -> Input: return self._field @property - def support(self) -> Input: + def support(self) -> Input[MeshedRegion]: r"""Allows to connect support input to the operator. 
meshed region or a support of the field @@ -218,11 +228,11 @@ class OutputsBindSupport(_Outputs): def __init__(self, op: Operator): super().__init__(bind_support._spec().outputs, op) - self._field = Output(bind_support._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(bind_support._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/bind_support_fc.py b/src/ansys/dpf/core/operators/utility/bind_support_fc.py index 17097cd9d5f..990b3704803 100644 --- a/src/ansys/dpf/core/operators/utility/bind_support_fc.py +++ b/src/ansys/dpf/core/operators/utility/bind_support_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class bind_support_fc(Operator): r"""Ties a support to a fields container. @@ -159,13 +165,17 @@ class InputsBindSupportFc(_Inputs): def __init__(self, op: Operator): super().__init__(bind_support_fc._spec().inputs, op) - self._fields_container = Input(bind_support_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + bind_support_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._support = Input(bind_support_fc._spec().input_pin(1), 1, op, -1) + self._support: Input[MeshedRegion | TimeFreqSupport] = Input( + bind_support_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._support) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -184,7 +194,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def support(self) -> Input: + def support(self) -> Input[MeshedRegion | TimeFreqSupport]: r"""Allows to connect support input to the operator. Meshed region or a support of the field. 
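# Illustrative usage sketch for bind_support (not part of the generated change).
# Assumes a running DPF server; `stress_field` (dpf.Field) and `mesh`
# (dpf.MeshedRegion) are hypothetical, pre-existing objects.
from ansys.dpf.core import operators as ops

op = ops.utility.bind_support()
op.inputs.field.connect(stress_field)    # Input[Field | FieldsContainer]
op.inputs.support.connect(mesh)          # Input[MeshedRegion]
field_with_support = op.outputs.field()  # Output[Field]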
@@ -219,11 +229,13 @@ class OutputsBindSupportFc(_Outputs): def __init__(self, op: Operator): super().__init__(bind_support_fc._spec().outputs, op) - self._fields_container = Output(bind_support_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + bind_support_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/change_location.py b/src/ansys/dpf/core/operators/utility/change_location.py index 2b539ed869a..a9dd87a1784 100644 --- a/src/ansys/dpf/core/operators/utility/change_location.py +++ b/src/ansys/dpf/core/operators/utility/change_location.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class change_location(Operator): r"""change the location of a field. @@ -155,13 +159,17 @@ class InputsChangeLocation(_Inputs): def __init__(self, op: Operator): super().__init__(change_location._spec().inputs, op) - self._field = Input(change_location._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input( + change_location._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._new_location = Input(change_location._spec().input_pin(1), 1, op, -1) + self._new_location: Input[str] = Input( + change_location._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._new_location) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -180,7 +188,7 @@ def field(self) -> Input: return self._field @property - def new_location(self) -> Input: + def new_location(self) -> Input[str]: r"""Allows to connect new_location input to the operator. new location of the output field ex 'Nodal', 'ElementalNodal', 'Elemental'... 
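# Illustrative usage sketch for change_location (not part of the generated
# change). Assumes a running DPF server; `elemental_field` is a hypothetical,
# pre-existing dpf.Field.
from ansys.dpf.core import operators as ops

op = ops.utility.change_location()
op.inputs.field.connect(elemental_field)  # Input[Field]
op.inputs.new_location.connect("Nodal")   # Input[str]: 'Nodal', 'Elemental', 'ElementalNodal', ...
relocated = op.outputs.field()            # Output[Field]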
@@ -215,11 +223,13 @@ class OutputsChangeLocation(_Outputs): def __init__(self, op: Operator): super().__init__(change_location._spec().outputs, op) - self._field = Output(change_location._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + change_location._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/change_shell_layers.py b/src/ansys/dpf/core/operators/utility/change_shell_layers.py index 250ce67da2a..a0148736143 100644 --- a/src/ansys/dpf/core/operators/utility/change_shell_layers.py +++ b/src/ansys/dpf/core/operators/utility/change_shell_layers.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,12 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + class change_shell_layers(Operator): r"""Extract the requested shell layers from the input fields. If the fields @@ -206,19 +213,25 @@ class InputsChangeShellLayers(_Inputs): def __init__(self, op: Operator): super().__init__(change_shell_layers._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer | Field] = Input( change_shell_layers._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._e_shell_layer = Input(change_shell_layers._spec().input_pin(1), 1, op, -1) + self._e_shell_layer: Input[int] = Input( + change_shell_layers._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._e_shell_layer) - self._mesh = Input(change_shell_layers._spec().input_pin(2), 2, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + change_shell_layers._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._mesh) - self._merge = Input(change_shell_layers._spec().input_pin(26), 26, op, -1) + self._merge: Input[bool] = Input( + change_shell_layers._spec().input_pin(26), 26, op, -1 + ) self._inputs.append(self._merge) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer | Field]: r"""Allows to connect fields_container input to the operator. Returns @@ -237,7 +250,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def e_shell_layer(self) -> Input: + def e_shell_layer(self) -> Input[int]: r"""Allows to connect e_shell_layer input to the operator. 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -258,7 +271,7 @@ def e_shell_layer(self) -> Input: return self._e_shell_layer @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. Mesh support of the input fields_container, in case it does not have one defined. If the fields_container contains mixed shell/solid results, the mesh is required (either by connecting this pin or in the support). @@ -279,7 +292,7 @@ def mesh(self) -> Input: return self._mesh @property - def merge(self) -> Input: + def merge(self) -> Input[bool]: r"""Allows to connect merge input to the operator. 
For fields with mixed shell layers (solid/shell elements with heterogeneous shell layers), group all of them in the same field (false by default). diff --git a/src/ansys/dpf/core/operators/utility/compute_time_scoping.py b/src/ansys/dpf/core/operators/utility/compute_time_scoping.py index defda2bddfb..287c04dc5aa 100644 --- a/src/ansys/dpf/core/operators/utility/compute_time_scoping.py +++ b/src/ansys/dpf/core/operators/utility/compute_time_scoping.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class compute_time_scoping(Operator): r"""Computes the time frequency scoping (made of set IDs) necessary to @@ -209,23 +215,25 @@ class InputsComputeTimeScoping(_Inputs): def __init__(self, op: Operator): super().__init__(compute_time_scoping._spec().inputs, op) - self._time_freq_values = Input( + self._time_freq_values: Input[float | Field | TimeFreqSupport] = Input( compute_time_scoping._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_values) - self._step = Input(compute_time_scoping._spec().input_pin(2), 2, op, -1) + self._step: Input[int] = Input( + compute_time_scoping._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._step) - self._interpolation_type = Input( + self._interpolation_type: Input[int] = Input( compute_time_scoping._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._interpolation_type) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( compute_time_scoping._spec().input_pin(8), 8, op, -1 ) self._inputs.append(self._time_freq_support) @property - def time_freq_values(self) -> Input: + def time_freq_values(self) -> Input[float | Field | TimeFreqSupport]: r"""Allows to connect time_freq_values input to the operator. List of frequencies or times needed. To specify load steps, put a field (and not a list) in input with a scoping located on "TimeFreq_steps". @@ -246,7 +254,7 @@ def time_freq_values(self) -> Input: return self._time_freq_values @property - def step(self) -> Input: + def step(self) -> Input[int]: r"""Allows to connect step input to the operator. Returns @@ -265,7 +273,7 @@ def step(self) -> Input: return self._step @property - def interpolation_type(self) -> Input: + def interpolation_type(self) -> Input[int]: r"""Allows to connect interpolation_type input to the operator. 1:ramped' or 2:stepped', default is ramped @@ -286,7 +294,7 @@ def interpolation_type(self) -> Input: return self._interpolation_type @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. 
Returns @@ -320,13 +328,17 @@ class OutputsComputeTimeScoping(_Outputs): def __init__(self, op: Operator): super().__init__(compute_time_scoping._spec().outputs, op) - self._scoping = Output(compute_time_scoping._spec().output_pin(0), 0, op) + self._scoping: Output[Scoping] = Output( + compute_time_scoping._spec().output_pin(0), 0, op + ) self._outputs.append(self._scoping) - self._field = Output(compute_time_scoping._spec().output_pin(1), 1, op) + self._field: Output[Field] = Output( + compute_time_scoping._spec().output_pin(1), 1, op + ) self._outputs.append(self._field) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator time_scoping @@ -346,7 +358,7 @@ def scoping(self) -> Output: return self._scoping @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator time_freq_values diff --git a/src/ansys/dpf/core/operators/utility/customtypefield_get_attribute.py b/src/ansys/dpf/core/operators/utility/customtypefield_get_attribute.py index af369618cfa..883f9edf6d6 100644 --- a/src/ansys/dpf/core/operators/utility/customtypefield_get_attribute.py +++ b/src/ansys/dpf/core/operators/utility/customtypefield_get_attribute.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.custom_type_field import CustomTypeField + class customtypefield_get_attribute(Operator): r"""Gets a property from an input field/field container. A CustomTypeFieldin @@ -170,17 +174,17 @@ class InputsCustomtypefieldGetAttribute(_Inputs): def __init__(self, op: Operator): super().__init__(customtypefield_get_attribute._spec().inputs, op) - self._custom_type_field = Input( + self._custom_type_field: Input[CustomTypeField] = Input( customtypefield_get_attribute._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._custom_type_field) - self._property_name = Input( + self._property_name: Input[str] = Input( customtypefield_get_attribute._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._property_name) @property - def custom_type_field(self) -> Input: + def custom_type_field(self) -> Input[CustomTypeField]: r"""Allows to connect custom_type_field input to the operator. Returns @@ -199,7 +203,7 @@ def custom_type_field(self) -> Input: return self._custom_type_field @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. Property to get. Accepted inputs are specific strings namely: 'unit, 'name','time_freq_support', 'scoping' and 'header'. 
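# Illustrative usage sketch for customtypefield_get_attribute (not part of the
# generated change). Assumes a running DPF server; `ctf` is a hypothetical,
# pre-existing dpf.CustomTypeField.
from ansys.dpf.core import operators as ops

op = ops.utility.customtypefield_get_attribute()
op.inputs.custom_type_field.connect(ctf)  # Input[CustomTypeField]
op.inputs.property_name.connect("unit")   # Input[str]: 'unit', 'name', 'time_freq_support', 'scoping' or 'header'
result = op.eval()                        # returned type depends on the requested property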
diff --git a/src/ansys/dpf/core/operators/utility/cyclic_support_get_attribute.py b/src/ansys/dpf/core/operators/utility/cyclic_support_get_attribute.py index cb3829ee64a..3e8b2a2f334 100644 --- a/src/ansys/dpf/core/operators/utility/cyclic_support_get_attribute.py +++ b/src/ansys/dpf/core/operators/utility/cyclic_support_get_attribute.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.cyclic_support import CyclicSupport + class cyclic_support_get_attribute(Operator): r"""A CyclicSupport in pin 0 and a property name (string) in pin 1 are @@ -165,17 +169,17 @@ class InputsCyclicSupportGetAttribute(_Inputs): def __init__(self, op: Operator): super().__init__(cyclic_support_get_attribute._spec().inputs, op) - self._cyclic_support = Input( + self._cyclic_support: Input[CyclicSupport] = Input( cyclic_support_get_attribute._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._cyclic_support) - self._property_name = Input( + self._property_name: Input[str] = Input( cyclic_support_get_attribute._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._property_name) @property - def cyclic_support(self) -> Input: + def cyclic_support(self) -> Input[CyclicSupport]: r"""Allows to connect cyclic_support input to the operator. Returns @@ -194,7 +198,7 @@ def cyclic_support(self) -> Input: return self._cyclic_support @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. Accepted inputs are: 'element_duplicate_offset' and 'node_duplicate_offset' (returns int). 
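# Illustrative usage sketch for cyclic_support_get_attribute (not part of the
# generated change). Assumes a running DPF server; `cyc_support` is a
# hypothetical, pre-existing dpf.CyclicSupport.
from ansys.dpf.core import operators as ops

op = ops.utility.cyclic_support_get_attribute()
op.inputs.cyclic_support.connect(cyc_support)             # Input[CyclicSupport]
op.inputs.property_name.connect("node_duplicate_offset")  # Input[str]
offset = op.eval()  # per the docstring above, this property returns an int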
diff --git a/src/ansys/dpf/core/operators/utility/default_value.py b/src/ansys/dpf/core/operators/utility/default_value.py index e1854cc4233..1b2cde8f5ba 100644 --- a/src/ansys/dpf/core/operators/utility/default_value.py +++ b/src/ansys/dpf/core/operators/utility/default_value.py @@ -155,9 +155,11 @@ class InputsDefaultValue(_Inputs): def __init__(self, op: Operator): super().__init__(default_value._spec().inputs, op) - self._forced_value = Input(default_value._spec().input_pin(0), 0, op, -1) + self._forced_value: Input = Input(default_value._spec().input_pin(0), 0, op, -1) self._inputs.append(self._forced_value) - self._default_value = Input(default_value._spec().input_pin(1), 1, op, -1) + self._default_value: Input = Input( + default_value._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._default_value) @property @@ -213,7 +215,7 @@ class OutputsDefaultValue(_Outputs): def __init__(self, op: Operator): super().__init__(default_value._spec().outputs, op) - self._output = Output(default_value._spec().output_pin(0), 0, op) + self._output: Output = Output(default_value._spec().output_pin(0), 0, op) self._outputs.append(self._output) @property diff --git a/src/ansys/dpf/core/operators/utility/delegate_to_operator.py b/src/ansys/dpf/core/operators/utility/delegate_to_operator.py index a09de0aaa92..8e1323cc7fa 100644 --- a/src/ansys/dpf/core/operators/utility/delegate_to_operator.py +++ b/src/ansys/dpf/core/operators/utility/delegate_to_operator.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.any import Any + class delegate_to_operator(Operator): r"""Delegate the run to an Operator instantiated by the name in input @@ -152,13 +156,13 @@ class InputsDelegateToOperator(_Inputs): def __init__(self, op: Operator): super().__init__(delegate_to_operator._spec().inputs, op) - self._operator_name = Input( + self._operator_name: Input[str] = Input( delegate_to_operator._spec().input_pin(-1), -1, op, -1 ) self._inputs.append(self._operator_name) @property - def operator_name(self) -> Input: + def operator_name(self) -> Input[str]: r"""Allows to connect operator_name input to the operator. 
Returns @@ -192,13 +196,17 @@ class OutputsDelegateToOperator(_Outputs): def __init__(self, op: Operator): super().__init__(delegate_to_operator._spec().outputs, op) - self._any1 = Output(delegate_to_operator._spec().output_pin(0), 0, op) + self._any1: Output[Any] = Output( + delegate_to_operator._spec().output_pin(0), 0, op + ) self._outputs.append(self._any1) - self._any2 = Output(delegate_to_operator._spec().output_pin(1), 1, op) + self._any2: Output[Any] = Output( + delegate_to_operator._spec().output_pin(1), 1, op + ) self._outputs.append(self._any2) @property - def any1(self) -> Output: + def any1(self) -> Output[Any]: r"""Allows to get any1 output of the operator inputs @@ -218,7 +226,7 @@ def any1(self) -> Output: return self._any1 @property - def any2(self) -> Output: + def any2(self) -> Output[Any]: r"""Allows to get any2 output of the operator inputs diff --git a/src/ansys/dpf/core/operators/utility/ds_get_attribute.py b/src/ansys/dpf/core/operators/utility/ds_get_attribute.py index 0a7729de16e..35d3bc78d8b 100644 --- a/src/ansys/dpf/core/operators/utility/ds_get_attribute.py +++ b/src/ansys/dpf/core/operators/utility/ds_get_attribute.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + class ds_get_attribute(Operator): r"""A DataSources in pin 0 and a property name (string) in pin 1 are @@ -217,21 +221,29 @@ class InputsDsGetAttribute(_Inputs): def __init__(self, op: Operator): super().__init__(ds_get_attribute._spec().inputs, op) - self._data_sources = Input(ds_get_attribute._spec().input_pin(0), 0, op, -1) + self._data_sources: Input[DataSources] = Input( + ds_get_attribute._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._data_sources) - self._property_name = Input(ds_get_attribute._spec().input_pin(1), 1, op, -1) + self._property_name: Input[str] = Input( + ds_get_attribute._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._property_name) - self._property_index = Input(ds_get_attribute._spec().input_pin(2), 2, op, -1) + self._property_index: Input[int] = Input( + ds_get_attribute._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._property_index) - self._property_key = Input(ds_get_attribute._spec().input_pin(3), 3, op, -1) + self._property_key: Input[str] = Input( + ds_get_attribute._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._property_key) - self._property_result_key = Input( + self._property_result_key: Input[str] = Input( ds_get_attribute._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._property_result_key) @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Returns @@ -250,7 +262,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. 
Accepted inputs are: 'file_path' (returns string), 'result_file_name' (returns string), 'domain_file_path' (returns string), 'domain_result_file_name' (returns string), 'num_keys' (returns int), num_result_key (returns int), num_file_path (returns int), 'num_result_file_path' (returns int), 'key_by_index' (returns string), 'result_key_by_index' (returns string), 'path_by_index' (returns string), 'path_key_by_index' (returns string). @@ -271,7 +283,7 @@ def property_name(self) -> Input: return self._property_name @property - def property_index(self) -> Input: + def property_index(self) -> Input[int]: r"""Allows to connect property_index input to the operator. Index for the property. Must be set for 'domain_file_path', 'domain_result_file_name' 'key_by_index', 'result_key_by_index', 'path_by_index' and 'path_key_by_index' properties. @@ -292,7 +304,7 @@ def property_index(self) -> Input: return self._property_index @property - def property_key(self) -> Input: + def property_key(self) -> Input[str]: r"""Allows to connect property_key input to the operator. Key to look for. Must be set for 'file_path' and 'domain_file_path' properties. @@ -313,7 +325,7 @@ def property_key(self) -> Input: return self._property_key @property - def property_result_key(self) -> Input: + def property_result_key(self) -> Input[str]: r"""Allows to connect property_result_key input to the operator. Result key to look for. Can be used for 'file_path', 'result_file_name', 'domain_file_path' and 'domain_result_file_name'. diff --git a/src/ansys/dpf/core/operators/utility/extract_field.py b/src/ansys/dpf/core/operators/utility/extract_field.py index 28be8d18e15..6f3e3400e97 100644 --- a/src/ansys/dpf/core/operators/utility/extract_field.py +++ b/src/ansys/dpf/core/operators/utility/extract_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class extract_field(Operator): r"""Extracts the fields at the indices defined in the vector (in 1) from the @@ -158,13 +163,15 @@ class InputsExtractField(_Inputs): def __init__(self, op: Operator): super().__init__(extract_field._spec().inputs, op) - self._fields_container = Input(extract_field._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[Field | FieldsContainer] = Input( + extract_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._indices = Input(extract_field._spec().input_pin(1), 1, op, -1) + self._indices: Input = Input(extract_field._spec().input_pin(1), 1, op, -1) self._inputs.append(self._indices) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fields_container input to the operator. 
if a field is in input, it is passed on as an output @@ -220,11 +227,11 @@ class OutputsExtractField(_Outputs): def __init__(self, op: Operator): super().__init__(extract_field._spec().outputs, op) - self._field = Output(extract_field._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(extract_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/extract_scoping.py b/src/ansys/dpf/core/operators/utility/extract_scoping.py index acd6bc32425..702807d3687 100644 --- a/src/ansys/dpf/core/operators/utility/extract_scoping.py +++ b/src/ansys/dpf/core/operators/utility/extract_scoping.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,17 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.custom_type_field import CustomTypeField + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.property_field import PropertyField + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + from ansys.dpf.core.string_field import StringField + class extract_scoping(Operator): r"""Takes a field type object, mesh or a collection of them and extracts its @@ -24,10 +36,9 @@ class extract_scoping(Operator): Inputs ------ field_or_fields_container: Field or FieldsContainer or PropertyField or - PropertyFieldsContainer or CustomTypeField or - CustomTypeFieldsContainer or StringField or Scoping - or ScopingsContainer or MeshedRegion or - MeshesContainer, optional + PropertyFieldsContainer or CustomTypeField or + CustomTypeFieldsContainer or StringField or Scoping or + ScopingsContainer or MeshedRegion or MeshesContainer, optional requested_location: int, optional If input 0 is a mesh or a meshes_container, the operator returns the nodes scoping, possible locations are: Nodal(default) or Elemental @@ -180,17 +191,37 @@ class InputsExtractScoping(_Inputs): def __init__(self, op: Operator): super().__init__(extract_scoping._spec().inputs, op) - self._field_or_fields_container = Input( - extract_scoping._spec().input_pin(0), 0, op, -1 - ) + self._field_or_fields_container: Input[ + Field + | FieldsContainer + | PropertyField + | CustomTypeField + | StringField + | Scoping + | ScopingsContainer + | MeshedRegion + | MeshesContainer + ] = Input(extract_scoping._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field_or_fields_container) - self._requested_location = Input( + self._requested_location: Input[int] = Input( extract_scoping._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._requested_location) @property - def field_or_fields_container(self) -> Input: + def field_or_fields_container( + self, + ) -> Input[ + Field + | FieldsContainer + | PropertyField + | CustomTypeField + | StringField + | Scoping + | ScopingsContainer + | MeshedRegion + | MeshesContainer + ]: r"""Allows to connect field_or_fields_container input to the operator. 
Returns @@ -209,7 +240,7 @@ def field_or_fields_container(self) -> Input: return self._field_or_fields_container @property - def requested_location(self) -> Input: + def requested_location(self) -> Input[int]: r"""Allows to connect requested_location input to the operator. If input 0 is a mesh or a meshes_container, the operator returns the nodes scoping, possible locations are: Nodal(default) or Elemental diff --git a/src/ansys/dpf/core/operators/utility/extract_sub_fc.py b/src/ansys/dpf/core/operators/utility/extract_sub_fc.py index fb5733c85b5..6a03a5149d3 100644 --- a/src/ansys/dpf/core/operators/utility/extract_sub_fc.py +++ b/src/ansys/dpf/core/operators/utility/extract_sub_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class extract_sub_fc(Operator): r"""Creates a new fields container with all the fields corresponding to the @@ -183,15 +188,21 @@ class InputsExtractSubFc(_Inputs): def __init__(self, op: Operator): super().__init__(extract_sub_fc._spec().inputs, op) - self._fields_container = Input(extract_sub_fc._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + extract_sub_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._label_space = Input(extract_sub_fc._spec().input_pin(1), 1, op, -1) + self._label_space: Input[dict | Scoping] = Input( + extract_sub_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._label_space) - self._collapse_labels = Input(extract_sub_fc._spec().input_pin(2), 2, op, -1) + self._collapse_labels: Input[bool] = Input( + extract_sub_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._collapse_labels) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. fields_container @@ -212,7 +223,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def label_space(self) -> Input: + def label_space(self) -> Input[dict | Scoping]: r"""Allows to connect label_space input to the operator. Label space, or scoping defining the label space (scoping location), values to keep (scoping IDs) @@ -233,7 +244,7 @@ def label_space(self) -> Input: return self._label_space @property - def collapse_labels(self) -> Input: + def collapse_labels(self) -> Input[bool]: r"""Allows to connect collapse_labels input to the operator. If set to true (default) the input label space (scoping location) is suppressed from the output fields container, otherwise, label space is kept. 
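# Illustrative usage sketch for extract_sub_fc (not part of the generated
# change). Assumes a running DPF server; `fc` is a hypothetical, pre-existing
# dpf.FieldsContainer. The new Input[dict | Scoping] annotation documents that
# a plain dict is accepted for label_space; extract_sub_mc and extract_sub_sc
# follow the same pattern for meshes and scopings containers.
from ansys.dpf.core import operators as ops

op = ops.utility.extract_sub_fc()
op.inputs.fields_container.connect(fc)      # Input[FieldsContainer]
op.inputs.label_space.connect({"time": 1})  # Input[dict | Scoping]
op.inputs.collapse_labels.connect(True)     # Input[bool]
sub_fc = op.outputs.fields_container()      # Output[FieldsContainer]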
@@ -268,11 +279,13 @@ class OutputsExtractSubFc(_Outputs): def __init__(self, op: Operator): super().__init__(extract_sub_fc._spec().outputs, op) - self._fields_container = Output(extract_sub_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + extract_sub_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator fields_container diff --git a/src/ansys/dpf/core/operators/utility/extract_sub_mc.py b/src/ansys/dpf/core/operators/utility/extract_sub_mc.py index 363b6e71a5a..3d89b852ffb 100644 --- a/src/ansys/dpf/core/operators/utility/extract_sub_mc.py +++ b/src/ansys/dpf/core/operators/utility/extract_sub_mc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshes_container import MeshesContainer + from ansys.dpf.core.scoping import Scoping + class extract_sub_mc(Operator): r"""Creates a new meshes container with all the meshed regions corresponding @@ -183,15 +188,21 @@ class InputsExtractSubMc(_Inputs): def __init__(self, op: Operator): super().__init__(extract_sub_mc._spec().inputs, op) - self._meshes = Input(extract_sub_mc._spec().input_pin(0), 0, op, -1) + self._meshes: Input[MeshesContainer] = Input( + extract_sub_mc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._meshes) - self._label_space = Input(extract_sub_mc._spec().input_pin(1), 1, op, -1) + self._label_space: Input[dict | Scoping] = Input( + extract_sub_mc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._label_space) - self._collapse_labels = Input(extract_sub_mc._spec().input_pin(2), 2, op, -1) + self._collapse_labels: Input[bool] = Input( + extract_sub_mc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._collapse_labels) @property - def meshes(self) -> Input: + def meshes(self) -> Input[MeshesContainer]: r"""Allows to connect meshes input to the operator. meshes @@ -212,7 +223,7 @@ def meshes(self) -> Input: return self._meshes @property - def label_space(self) -> Input: + def label_space(self) -> Input[dict | Scoping]: r"""Allows to connect label_space input to the operator. Label space, or scoping defining the label space (scoping location), values to keep (scoping IDs) @@ -233,7 +244,7 @@ def label_space(self) -> Input: return self._label_space @property - def collapse_labels(self) -> Input: + def collapse_labels(self) -> Input[bool]: r"""Allows to connect collapse_labels input to the operator. If set to true (default) the input label space (scoping location) is suppressed from the output meshes container, otherwise, label space is kept. 
@@ -268,11 +279,13 @@ class OutputsExtractSubMc(_Outputs): def __init__(self, op: Operator): super().__init__(extract_sub_mc._spec().outputs, op) - self._meshes_container = Output(extract_sub_mc._spec().output_pin(0), 0, op) + self._meshes_container: Output[MeshesContainer] = Output( + extract_sub_mc._spec().output_pin(0), 0, op + ) self._outputs.append(self._meshes_container) @property - def meshes_container(self) -> Output: + def meshes_container(self) -> Output[MeshesContainer]: r"""Allows to get meshes_container output of the operator meshes diff --git a/src/ansys/dpf/core/operators/utility/extract_sub_sc.py b/src/ansys/dpf/core/operators/utility/extract_sub_sc.py index 2d8c9692e42..5923cb1c021 100644 --- a/src/ansys/dpf/core/operators/utility/extract_sub_sc.py +++ b/src/ansys/dpf/core/operators/utility/extract_sub_sc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class extract_sub_sc(Operator): r"""Creates a new scopings container with all the scopings corresponding to @@ -183,15 +188,21 @@ class InputsExtractSubSc(_Inputs): def __init__(self, op: Operator): super().__init__(extract_sub_sc._spec().inputs, op) - self._scopings_container = Input(extract_sub_sc._spec().input_pin(0), 0, op, -1) + self._scopings_container: Input[ScopingsContainer] = Input( + extract_sub_sc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._scopings_container) - self._label_space = Input(extract_sub_sc._spec().input_pin(1), 1, op, -1) + self._label_space: Input[dict | Scoping] = Input( + extract_sub_sc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._label_space) - self._collapse_labels = Input(extract_sub_sc._spec().input_pin(2), 2, op, -1) + self._collapse_labels: Input[bool] = Input( + extract_sub_sc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._collapse_labels) @property - def scopings_container(self) -> Input: + def scopings_container(self) -> Input[ScopingsContainer]: r"""Allows to connect scopings_container input to the operator. scopings_container @@ -212,7 +223,7 @@ def scopings_container(self) -> Input: return self._scopings_container @property - def label_space(self) -> Input: + def label_space(self) -> Input[dict | Scoping]: r"""Allows to connect label_space input to the operator. Label space, or scoping defining the label space (scoping location), values to keep (scoping IDs) @@ -233,7 +244,7 @@ def label_space(self) -> Input: return self._label_space @property - def collapse_labels(self) -> Input: + def collapse_labels(self) -> Input[bool]: r"""Allows to connect collapse_labels input to the operator. If set to true (default) the input label space (scoping location) is suppressed from the output scopings container, otherwise, label space is kept. 
@@ -268,11 +279,13 @@ class OutputsExtractSubSc(_Outputs): def __init__(self, op: Operator): super().__init__(extract_sub_sc._spec().outputs, op) - self._scopings_container = Output(extract_sub_sc._spec().output_pin(0), 0, op) + self._scopings_container: Output[ScopingsContainer] = Output( + extract_sub_sc._spec().output_pin(0), 0, op + ) self._outputs.append(self._scopings_container) @property - def scopings_container(self) -> Output: + def scopings_container(self) -> Output[ScopingsContainer]: r"""Allows to get scopings_container output of the operator scopings_container diff --git a/src/ansys/dpf/core/operators/utility/extract_time_freq.py b/src/ansys/dpf/core/operators/utility/extract_time_freq.py index a021928452b..d27b1c0ad58 100644 --- a/src/ansys/dpf/core/operators/utility/extract_time_freq.py +++ b/src/ansys/dpf/core/operators/utility/extract_time_freq.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class extract_time_freq(Operator): r"""Extract timefreqs with sets scoping from a time freq support @@ -177,17 +182,21 @@ class InputsExtractTimeFreq(_Inputs): def __init__(self, op: Operator): super().__init__(extract_time_freq._spec().inputs, op) - self._time_freq_support = Input( + self._time_freq_support: Input[TimeFreqSupport] = Input( extract_time_freq._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_freq_support) - self._time_scoping = Input(extract_time_freq._spec().input_pin(1), 1, op, -1) + self._time_scoping: Input[Scoping] = Input( + extract_time_freq._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._time_scoping) - self._real_or_complex = Input(extract_time_freq._spec().input_pin(2), 2, op, -1) + self._real_or_complex: Input[bool] = Input( + extract_time_freq._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._real_or_complex) @property - def time_freq_support(self) -> Input: + def time_freq_support(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_support input to the operator. Returns @@ -206,7 +215,7 @@ def time_freq_support(self) -> Input: return self._time_freq_support @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. Returns @@ -225,7 +234,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def real_or_complex(self) -> Input: + def real_or_complex(self) -> Input[bool]: r"""Allows to connect real_or_complex input to the operator. False for real only (default). True for complex output. 
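# Illustrative usage sketch for extract_time_freq (not part of the generated
# change). Assumes a running DPF server; `tf_support` is a hypothetical,
# pre-existing dpf.TimeFreqSupport.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.utility.extract_time_freq()
op.inputs.time_freq_support.connect(tf_support)          # Input[TimeFreqSupport]
op.inputs.time_scoping.connect(dpf.Scoping(ids=[1, 2]))  # Input[Scoping]
op.inputs.real_or_complex.connect(False)                 # Input[bool]: real values only
values = op.eval()  # the output pins are not shown in this hunk; eval() fetches the first output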
diff --git a/src/ansys/dpf/core/operators/utility/fc_get_attribute.py b/src/ansys/dpf/core/operators/utility/fc_get_attribute.py index 8f32db13196..a35c2b880d3 100644 --- a/src/ansys/dpf/core/operators/utility/fc_get_attribute.py +++ b/src/ansys/dpf/core/operators/utility/fc_get_attribute.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class fc_get_attribute(Operator): r"""Uses the FieldsContainer APIs to return a given attribute of the fields @@ -192,17 +196,21 @@ class InputsFcGetAttribute(_Inputs): def __init__(self, op: Operator): super().__init__(fc_get_attribute._spec().inputs, op) - self._fields_container = Input(fc_get_attribute._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + fc_get_attribute._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._property_name = Input(fc_get_attribute._spec().input_pin(1), 1, op, -1) + self._property_name: Input[str] = Input( + fc_get_attribute._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._property_name) - self._property_identifier = Input( + self._property_identifier: Input[str | int] = Input( fc_get_attribute._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._property_identifier) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -221,7 +229,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. Supported property names are: "label_scoping", "label_values", "time_freq_support", "labels", "field_scoping" and "field". @@ -242,7 +250,7 @@ def property_name(self) -> Input: return self._property_name @property - def property_identifier(self) -> Input: + def property_identifier(self) -> Input[str | int]: r"""Allows to connect property_identifier input to the operator. Additional pin for some property : the label name for "label_scoping" or "label_values", the field index (default 0) for "field_scoping" or "field". diff --git a/src/ansys/dpf/core/operators/utility/field.py b/src/ansys/dpf/core/operators/utility/field.py index 7401eaf963b..26e776c33d4 100644 --- a/src/ansys/dpf/core/operators/utility/field.py +++ b/src/ansys/dpf/core/operators/utility/field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class field(Operator): r"""Incrementaly merge the input. @@ -139,11 +143,11 @@ class InputsField(_Inputs): def __init__(self, op: Operator): super().__init__(field._spec().inputs, op) - self._input = Input(field._spec().input_pin(0), 0, op, -1) + self._input: Input[Field] = Input(field._spec().input_pin(0), 0, op, -1) self._inputs.append(self._input) @property - def input(self) -> Input: + def input(self) -> Input[Field]: r"""Allows to connect input input to the operator. 
Returns @@ -176,7 +180,7 @@ class OutputsField(_Outputs): def __init__(self, op: Operator): super().__init__(field._spec().outputs, op) - self._incremented_result = Output(field._spec().output_pin(0), 0, op) + self._incremented_result: Output = Output(field._spec().output_pin(0), 0, op) self._outputs.append(self._incremented_result) @property diff --git a/src/ansys/dpf/core/operators/utility/field_clone_to_shell_layer.py b/src/ansys/dpf/core/operators/utility/field_clone_to_shell_layer.py index 55fb31a8250..143dcea4374 100644 --- a/src/ansys/dpf/core/operators/utility/field_clone_to_shell_layer.py +++ b/src/ansys/dpf/core/operators/utility/field_clone_to_shell_layer.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class field_clone_to_shell_layer(Operator): r"""Generates a Field from the Field in input 0 that has the same @@ -189,19 +193,21 @@ class InputsFieldCloneToShellLayer(_Inputs): def __init__(self, op: Operator): super().__init__(field_clone_to_shell_layer._spec().inputs, op) - self._field = Input(field_clone_to_shell_layer._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input( + field_clone_to_shell_layer._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._shell_layer = Input( + self._shell_layer: Input[int] = Input( field_clone_to_shell_layer._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._shell_layer) - self._duplicate_scoping = Input( + self._duplicate_scoping: Input[bool] = Input( field_clone_to_shell_layer._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._duplicate_scoping) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -220,7 +226,7 @@ def field(self) -> Input: return self._field @property - def shell_layer(self) -> Input: + def shell_layer(self) -> Input[int]: r"""Allows to connect shell_layer input to the operator. 0: Top, 1: Bottom, 2: TopBottom, 3: Mid, 4: TopBottomMid. @@ -241,7 +247,7 @@ def shell_layer(self) -> Input: return self._shell_layer @property - def duplicate_scoping(self) -> Input: + def duplicate_scoping(self) -> Input[bool]: r"""Allows to connect duplicate_scoping input to the operator. If true, a new scoping is computed for the output Field. If false, the input Field scoping is used. Default is false. 
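field_clone_to_shell_layer now exposes Input[Field], Input[int], Input[bool] and Output[Field]. Because the output getter is parametrized, a type checker can infer the return type of a small wrapper without casts. A hedged sketch, assuming a running DPF server; the wrapper name and the choice of the Mid layer are illustrative:

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops


def clone_to_mid_layer(stress: dpf.Field) -> dpf.Field:
    """Illustrative wrapper: clone a field to the Mid shell layer."""
    op = ops.utility.field_clone_to_shell_layer()
    op.inputs.field.connect(stress)            # Input[Field]
    op.inputs.shell_layer.connect(3)           # Input[int], 3 = Mid per the pin docstring
    op.inputs.duplicate_scoping.connect(True)  # Input[bool]
    # op.outputs.field is Output[Field], so a checker infers dpf.Field here.
    return op.outputs.field()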
@@ -276,11 +282,13 @@ class OutputsFieldCloneToShellLayer(_Outputs): def __init__(self, op: Operator): super().__init__(field_clone_to_shell_layer._spec().outputs, op) - self._field = Output(field_clone_to_shell_layer._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + field_clone_to_shell_layer._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/field_get_attribute.py b/src/ansys/dpf/core/operators/utility/field_get_attribute.py index 832946070aa..04af012267b 100644 --- a/src/ansys/dpf/core/operators/utility/field_get_attribute.py +++ b/src/ansys/dpf/core/operators/utility/field_get_attribute.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class field_get_attribute(Operator): r"""Gets a property from an input field/field container. A Fieldin pin 0, a @@ -164,13 +169,17 @@ class InputsFieldGetAttribute(_Inputs): def __init__(self, op: Operator): super().__init__(field_get_attribute._spec().inputs, op) - self._field = Input(field_get_attribute._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + field_get_attribute._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._property_name = Input(field_get_attribute._spec().input_pin(1), 1, op, -1) + self._property_name: Input[str] = Input( + field_get_attribute._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._property_name) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns @@ -189,7 +198,7 @@ def field(self) -> Input: return self._field @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. Property to get. Accepted inputs are specific strings namely: 'unit, 'name','time_freq_support', 'scoping' and 'header'. diff --git a/src/ansys/dpf/core/operators/utility/field_to_fc.py b/src/ansys/dpf/core/operators/utility/field_to_fc.py index 4f53cc81f97..e1beb7e81dd 100644 --- a/src/ansys/dpf/core/operators/utility/field_to_fc.py +++ b/src/ansys/dpf/core/operators/utility/field_to_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class field_to_fc(Operator): r"""Creates a fields container containing the field provided on pin 0. 
@@ -156,13 +161,15 @@ class InputsFieldToFc(_Inputs): def __init__(self, op: Operator): super().__init__(field_to_fc._spec().inputs, op) - self._field = Input(field_to_fc._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + field_to_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._label = Input(field_to_fc._spec().input_pin(1), 1, op, -1) + self._label: Input[dict] = Input(field_to_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._label) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. If a fields container is set in input, it is passed on as an output with the additional label space (if any). @@ -183,7 +190,7 @@ def field(self) -> Input: return self._field @property - def label(self) -> Input: + def label(self) -> Input[dict]: r"""Allows to connect label input to the operator. Sets a label space. @@ -218,11 +225,13 @@ class OutputsFieldToFc(_Outputs): def __init__(self, op: Operator): super().__init__(field_to_fc._spec().outputs, op) - self._fields_container = Output(field_to_fc._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + field_to_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/fields_container.py b/src/ansys/dpf/core/operators/utility/fields_container.py index 164708b3bab..25dfa9e1668 100644 --- a/src/ansys/dpf/core/operators/utility/fields_container.py +++ b/src/ansys/dpf/core/operators/utility/fields_container.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class fields_container(Operator): r"""Incrementaly merge the input. @@ -143,11 +147,13 @@ class InputsFieldsContainer(_Inputs): def __init__(self, op: Operator): super().__init__(fields_container._spec().inputs, op) - self._input = Input(fields_container._spec().input_pin(0), 0, op, -1) + self._input: Input[FieldsContainer] = Input( + fields_container._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._input) @property - def input(self) -> Input: + def input(self) -> Input[FieldsContainer]: r"""Allows to connect input input to the operator. 
Returns @@ -180,7 +186,9 @@ class OutputsFieldsContainer(_Outputs): def __init__(self, op: Operator): super().__init__(fields_container._spec().outputs, op) - self._incremented_result = Output(fields_container._spec().output_pin(0), 0, op) + self._incremented_result: Output = Output( + fields_container._spec().output_pin(0), 0, op + ) self._outputs.append(self._incremented_result) @property diff --git a/src/ansys/dpf/core/operators/utility/fields_container_matrices_label.py b/src/ansys/dpf/core/operators/utility/fields_container_matrices_label.py index 4ab8754d2e0..1c895e47246 100644 --- a/src/ansys/dpf/core/operators/utility/fields_container_matrices_label.py +++ b/src/ansys/dpf/core/operators/utility/fields_container_matrices_label.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + class fields_container_matrices_label(Operator): r"""Merge fields of fields container into field matrices. The output is a @@ -185,21 +190,21 @@ class InputsFieldsContainerMatricesLabel(_Inputs): def __init__(self, op: Operator): super().__init__(fields_container_matrices_label._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( fields_container_matrices_label._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._label = Input( + self._label: Input[str] = Input( fields_container_matrices_label._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._label) - self._time_scoping = Input( + self._time_scoping: Input[int | Scoping] = Input( fields_container_matrices_label._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._time_scoping) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. fields container to be merged @@ -220,7 +225,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def label(self) -> Input: + def label(self) -> Input[str]: r"""Allows to connect label input to the operator. Label where the merge is required @@ -241,7 +246,7 @@ def label(self) -> Input: return self._label @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[int | Scoping]: r"""Allows to connect time_scoping input to the operator. if it's specified, fields container of field matrices is constructed only on the specified time scoping @@ -276,13 +281,13 @@ class OutputsFieldsContainerMatricesLabel(_Outputs): def __init__(self, op: Operator): super().__init__(fields_container_matrices_label._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( fields_container_matrices_label._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator fields container of field matrices obtained after merging. 
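Every patched module guards its new imports behind typing.TYPE_CHECKING and already relies on from __future__ import annotations, so the type-only imports are never executed at runtime and cannot create import cycles. A generic, self-contained sketch of that pattern; heavy_module and Matrix are hypothetical names, not DPF ones:

from __future__ import annotations  # annotations are kept as strings at runtime

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by static type checkers; never imported when the module runs,
    # which avoids circular imports and import-time cost.
    from heavy_module import Matrix  # hypothetical


def scale(m: Matrix, factor: float) -> Matrix:
    # "Matrix" stays a string annotation at runtime, so defining and calling
    # this function works even though heavy_module was never imported.
    m.data = [x * factor for x in m.data]
    return m


class _Stub:
    """Duck-typed stand-in for the runtime demo (a checker would flag it, which is the point)."""

    def __init__(self):
        self.data = [1.0, 2.0]


print(scale(_Stub(), 2.0).data)  # [2.0, 4.0]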
diff --git a/src/ansys/dpf/core/operators/utility/for_each.py b/src/ansys/dpf/core/operators/utility/for_each.py index f2500241c05..3d40c507ab6 100644 --- a/src/ansys/dpf/core/operators/utility/for_each.py +++ b/src/ansys/dpf/core/operators/utility/for_each.py @@ -219,15 +219,15 @@ class InputsForEach(_Inputs): def __init__(self, op: Operator): super().__init__(for_each._spec().inputs, op) - self._iterable = Input(for_each._spec().input_pin(0), 0, op, -1) + self._iterable: Input = Input(for_each._spec().input_pin(0), 0, op, -1) self._inputs.append(self._iterable) - self._iterable_values = Input(for_each._spec().input_pin(1), 1, op, -1) + self._iterable_values: Input = Input(for_each._spec().input_pin(1), 1, op, -1) self._inputs.append(self._iterable_values) - self._pin_index = Input(for_each._spec().input_pin(2), 2, op, -1) + self._pin_index: Input[int] = Input(for_each._spec().input_pin(2), 2, op, -1) self._inputs.append(self._pin_index) - self._forward1 = Input(for_each._spec().input_pin(3), 3, op, 0) + self._forward1: Input = Input(for_each._spec().input_pin(3), 3, op, 0) self._inputs.append(self._forward1) - self._forward2 = Input(for_each._spec().input_pin(4), 4, op, 1) + self._forward2: Input = Input(for_each._spec().input_pin(4), 4, op, 1) self._inputs.append(self._forward2) @property @@ -271,7 +271,7 @@ def iterable_values(self) -> Input: return self._iterable_values @property - def pin_index(self) -> Input: + def pin_index(self) -> Input[int]: r"""Allows to connect pin_index input to the operator. Returns @@ -344,11 +344,11 @@ class OutputsForEach(_Outputs): def __init__(self, op: Operator): super().__init__(for_each._spec().outputs, op) - self._empty = Output(for_each._spec().output_pin(0), 0, op) + self._empty: Output = Output(for_each._spec().output_pin(0), 0, op) self._outputs.append(self._empty) - self._output1 = Output(for_each._spec().output_pin(3), 3, op) + self._output1: Output = Output(for_each._spec().output_pin(3), 3, op) self._outputs.append(self._output1) - self._output2 = Output(for_each._spec().output_pin(4), 4, op) + self._output2: Output = Output(for_each._spec().output_pin(4), 4, op) self._outputs.append(self._output2) @property diff --git a/src/ansys/dpf/core/operators/utility/forward.py b/src/ansys/dpf/core/operators/utility/forward.py index 0396ce1157b..fa04e2b7249 100644 --- a/src/ansys/dpf/core/operators/utility/forward.py +++ b/src/ansys/dpf/core/operators/utility/forward.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.any import Any + class forward(Operator): r"""Return all the inputs as outputs. @@ -142,11 +146,11 @@ class InputsForward(_Inputs): def __init__(self, op: Operator): super().__init__(forward._spec().inputs, op) - self._any = Input(forward._spec().input_pin(0), 0, op, -1) + self._any: Input[Any] = Input(forward._spec().input_pin(0), 0, op, -1) self._inputs.append(self._any) @property - def any(self) -> Input: + def any(self) -> Input[Any]: r"""Allows to connect any input to the operator. 
any type of input @@ -181,11 +185,11 @@ class OutputsForward(_Outputs): def __init__(self, op: Operator): super().__init__(forward._spec().outputs, op) - self._any = Output(forward._spec().output_pin(0), 0, op) + self._any: Output[Any] = Output(forward._spec().output_pin(0), 0, op) self._outputs.append(self._any) @property - def any(self) -> Output: + def any(self) -> Output[Any]: r"""Allows to get any output of the operator same types as inputs diff --git a/src/ansys/dpf/core/operators/utility/forward_field.py b/src/ansys/dpf/core/operators/utility/forward_field.py index 7080a2d3768..b73c0a0db26 100644 --- a/src/ansys/dpf/core/operators/utility/forward_field.py +++ b/src/ansys/dpf/core/operators/utility/forward_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class forward_field(Operator): r"""Returns the input field or fields container. @@ -141,11 +146,13 @@ class InputsForwardField(_Inputs): def __init__(self, op: Operator): super().__init__(forward_field._spec().inputs, op) - self._field = Input(forward_field._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + forward_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. field or fields container with only one field is expected @@ -180,11 +187,11 @@ class OutputsForwardField(_Outputs): def __init__(self, op: Operator): super().__init__(forward_field._spec().outputs, op) - self._field = Output(forward_field._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(forward_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/forward_fields_container.py b/src/ansys/dpf/core/operators/utility/forward_fields_container.py index 94c7bdfa305..083ca95fedd 100644 --- a/src/ansys/dpf/core/operators/utility/forward_fields_container.py +++ b/src/ansys/dpf/core/operators/utility/forward_fields_container.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class forward_fields_container(Operator): r"""Returns the input field or fields container. 
@@ -140,11 +145,13 @@ class InputsForwardFieldsContainer(_Inputs): def __init__(self, op: Operator): super().__init__(forward_fields_container._spec().inputs, op) - self._fields = Input(forward_fields_container._spec().input_pin(0), 0, op, -1) + self._fields: Input[FieldsContainer | Field] = Input( + forward_fields_container._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields) @property - def fields(self) -> Input: + def fields(self) -> Input[FieldsContainer | Field]: r"""Allows to connect fields input to the operator. Returns @@ -177,13 +184,13 @@ class OutputsForwardFieldsContainer(_Outputs): def __init__(self, op: Operator): super().__init__(forward_fields_container._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( forward_fields_container._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/forward_meshes_container.py b/src/ansys/dpf/core/operators/utility/forward_meshes_container.py index bbfee1cfd02..2b7b4c20af0 100644 --- a/src/ansys/dpf/core/operators/utility/forward_meshes_container.py +++ b/src/ansys/dpf/core/operators/utility/forward_meshes_container.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + class forward_meshes_container(Operator): r"""Returns the input mesh or meshes container into a meshes container. @@ -155,15 +160,17 @@ class InputsForwardMeshesContainer(_Inputs): def __init__(self, op: Operator): super().__init__(forward_meshes_container._spec().inputs, op) - self._meshes = Input(forward_meshes_container._spec().input_pin(0), 0, op, -1) + self._meshes: Input[MeshesContainer | MeshedRegion] = Input( + forward_meshes_container._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._meshes) - self._default_label = Input( + self._default_label: Input[str] = Input( forward_meshes_container._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._default_label) @property - def meshes(self) -> Input: + def meshes(self) -> Input[MeshesContainer | MeshedRegion]: r"""Allows to connect meshes input to the operator. Returns @@ -182,7 +189,7 @@ def meshes(self) -> Input: return self._meshes @property - def default_label(self) -> Input: + def default_label(self) -> Input[str]: r"""Allows to connect default_label input to the operator. 
this default label is used if a new meshes container needs to be created (default is unknown) @@ -217,13 +224,13 @@ class OutputsForwardMeshesContainer(_Outputs): def __init__(self, op: Operator): super().__init__(forward_meshes_container._spec().outputs, op) - self._meshes_container = Output( + self._meshes_container: Output[MeshesContainer] = Output( forward_meshes_container._spec().output_pin(0), 0, op ) self._outputs.append(self._meshes_container) @property - def meshes_container(self) -> Output: + def meshes_container(self) -> Output[MeshesContainer]: r"""Allows to get meshes_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/hdf5dpf_workglow_provider.py b/src/ansys/dpf/core/operators/utility/hdf5dpf_workglow_provider.py index 61b14bc61fa..f2989699828 100644 --- a/src/ansys/dpf/core/operators/utility/hdf5dpf_workglow_provider.py +++ b/src/ansys/dpf/core/operators/utility/hdf5dpf_workglow_provider.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.data_tree import DataTree + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.workflow import Workflow + class hdf5dpf_workglow_provider(Operator): r"""Extract a custom result from an hdf5dpf file as an executable workflow. @@ -228,31 +236,33 @@ class InputsHdf5DpfWorkglowProvider(_Inputs): def __init__(self, op: Operator): super().__init__(hdf5dpf_workglow_provider._spec().inputs, op) - self._time_scoping = Input( + self._time_scoping: Input[Scoping] = Input( hdf5dpf_workglow_provider._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( + self._mesh_scoping: Input[Scoping] = Input( hdf5dpf_workglow_provider._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._mesh_scoping) - self._streams = Input(hdf5dpf_workglow_provider._spec().input_pin(3), 3, op, -1) + self._streams: Input[StreamsContainer] = Input( + hdf5dpf_workglow_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams) - self._data_sources = Input( + self._data_sources: Input[DataSources] = Input( hdf5dpf_workglow_provider._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources) - self._meta_data = Input( + self._meta_data: Input[DataTree] = Input( hdf5dpf_workglow_provider._spec().input_pin(24), 24, op, -1 ) self._inputs.append(self._meta_data) - self._result_name = Input( + self._result_name: Input = Input( hdf5dpf_workglow_provider._spec().input_pin(60), 60, op, -1 ) self._inputs.append(self._result_name) @property - def time_scoping(self) -> Input: + def time_scoping(self) -> Input[Scoping]: r"""Allows to connect time_scoping input to the operator. Returns @@ -271,7 +281,7 @@ def time_scoping(self) -> Input: return self._time_scoping @property - def mesh_scoping(self) -> Input: + def mesh_scoping(self) -> Input[Scoping]: r"""Allows to connect mesh_scoping input to the operator. Returns @@ -290,7 +300,7 @@ def mesh_scoping(self) -> Input: return self._mesh_scoping @property - def streams(self) -> Input: + def streams(self) -> Input[StreamsContainer]: r"""Allows to connect streams input to the operator. Hdf5df file stream. 
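forward_meshes_container above takes the union Input[MeshesContainer | MeshedRegion] and always returns Output[MeshesContainer], so a bare mesh can be promoted to a container in a type-safe way. A hedged sketch, assuming a running DPF server and a MeshedRegion obtained elsewhere (for example from a Model); the helper name and label are illustrative:

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops


def wrap_mesh(mesh: dpf.MeshedRegion) -> dpf.MeshesContainer:
    """Illustrative helper: put a single mesh into a labeled meshes container."""
    op = ops.utility.forward_meshes_container()
    op.inputs.meshes.connect(mesh)           # Input[MeshesContainer | MeshedRegion]
    op.inputs.default_label.connect("body")  # Input[str], used only if a new container is created
    return op.outputs.meshes_container()     # inferred as MeshesContainer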
@@ -311,7 +321,7 @@ def streams(self) -> Input: return self._streams @property - def data_sources(self) -> Input: + def data_sources(self) -> Input[DataSources]: r"""Allows to connect data_sources input to the operator. Hdf5df file data source. @@ -332,7 +342,7 @@ def data_sources(self) -> Input: return self._data_sources @property - def meta_data(self) -> Input: + def meta_data(self) -> Input[DataTree]: r"""Allows to connect meta_data input to the operator. meta_data that may be used to evaluate results or extract workflows. @@ -388,13 +398,13 @@ class OutputsHdf5DpfWorkglowProvider(_Outputs): def __init__(self, op: Operator): super().__init__(hdf5dpf_workglow_provider._spec().outputs, op) - self._field_or_fields_container = Output( + self._field_or_fields_container: Output[Workflow] = Output( hdf5dpf_workglow_provider._spec().output_pin(0), 0, op ) self._outputs.append(self._field_or_fields_container) @property - def field_or_fields_container(self) -> Output: + def field_or_fields_container(self) -> Output[Workflow]: r"""Allows to get field_or_fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/html_doc.py b/src/ansys/dpf/core/operators/utility/html_doc.py index 5f191637c5d..b1814274da3 100644 --- a/src/ansys/dpf/core/operators/utility/html_doc.py +++ b/src/ansys/dpf/core/operators/utility/html_doc.py @@ -146,13 +146,15 @@ class InputsHtmlDoc(_Inputs): def __init__(self, op: Operator): super().__init__(html_doc._spec().inputs, op) - self._output_path = Input(html_doc._spec().input_pin(0), 0, op, -1) + self._output_path: Input[str] = Input(html_doc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._output_path) - self._exposure_level = Input(html_doc._spec().input_pin(1), 1, op, -1) + self._exposure_level: Input[int] = Input( + html_doc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._exposure_level) @property - def output_path(self) -> Input: + def output_path(self) -> Input[str]: r"""Allows to connect output_path input to the operator. default is {working directory}/dataProcessingDoc.html @@ -173,7 +175,7 @@ def output_path(self) -> Input: return self._output_path @property - def exposure_level(self) -> Input: + def exposure_level(self) -> Input[int]: r"""Allows to connect exposure_level input to the operator. Generate the documentation depending on exposure level : 0 (default) for public operators, 1 includes hidden operator, 2 includes private operator, 3 includes operator without specifications. diff --git a/src/ansys/dpf/core/operators/utility/incremental_concatenate_as_fc.py b/src/ansys/dpf/core/operators/utility/incremental_concatenate_as_fc.py index 19de953288e..c926dfd10ea 100644 --- a/src/ansys/dpf/core/operators/utility/incremental_concatenate_as_fc.py +++ b/src/ansys/dpf/core/operators/utility/incremental_concatenate_as_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class incremental_concatenate_as_fc(Operator): r"""Assemble fields in a fields container. 
@@ -159,17 +164,17 @@ class InputsIncrementalConcatenateAsFc(_Inputs): def __init__(self, op: Operator): super().__init__(incremental_concatenate_as_fc._spec().inputs, op) - self._field = Input( + self._field: Input[Field] = Input( incremental_concatenate_as_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._field) - self._label = Input( + self._label: Input[dict] = Input( incremental_concatenate_as_fc._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._label) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -188,7 +193,7 @@ def field(self) -> Input: return self._field @property - def label(self) -> Input: + def label(self) -> Input[dict]: r"""Allows to connect label input to the operator. Label space value that must be applied to the added field. @@ -223,13 +228,13 @@ class OutputsIncrementalConcatenateAsFc(_Outputs): def __init__(self, op: Operator): super().__init__(incremental_concatenate_as_fc._spec().outputs, op) - self._output = Output( + self._output: Output[FieldsContainer] = Output( incremental_concatenate_as_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._output) @property - def output(self) -> Output: + def output(self) -> Output[FieldsContainer]: r"""Allows to get output output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/ints_to_scoping.py b/src/ansys/dpf/core/operators/utility/ints_to_scoping.py index 9ecd5f36254..191b7cb8ff5 100644 --- a/src/ansys/dpf/core/operators/utility/ints_to_scoping.py +++ b/src/ansys/dpf/core/operators/utility/ints_to_scoping.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.scoping import Scoping + class ints_to_scoping(Operator): r"""take a int or a vector of int and transform it in a one entity field of @@ -179,15 +183,21 @@ class InputsIntsToScoping(_Inputs): def __init__(self, op: Operator): super().__init__(ints_to_scoping._spec().inputs, op) - self._int_or_vector_int = Input(ints_to_scoping._spec().input_pin(0), 0, op, -1) + self._int_or_vector_int: Input[int | Scoping] = Input( + ints_to_scoping._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._int_or_vector_int) - self._location = Input(ints_to_scoping._spec().input_pin(1), 1, op, -1) + self._location: Input[str] = Input( + ints_to_scoping._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._location) - self._upper_bound = Input(ints_to_scoping._spec().input_pin(2), 2, op, -1) + self._upper_bound: Input[int | Scoping] = Input( + ints_to_scoping._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._upper_bound) @property - def int_or_vector_int(self) -> Input: + def int_or_vector_int(self) -> Input[int | Scoping]: r"""Allows to connect int_or_vector_int input to the operator. int or single value scoping or vector of int @@ -208,7 +218,7 @@ def int_or_vector_int(self) -> Input: return self._int_or_vector_int @property - def location(self) -> Input: + def location(self) -> Input[str]: r"""Allows to connect location input to the operator. 
Returns @@ -227,7 +237,7 @@ def location(self) -> Input: return self._location @property - def upper_bound(self) -> Input: + def upper_bound(self) -> Input[int | Scoping]: r"""Allows to connect upper_bound input to the operator. Define the upper bound to create a scoping that will contain a range from the single value input in pin 0 to the upper bound defined in this pin. @@ -262,11 +272,13 @@ class OutputsIntsToScoping(_Outputs): def __init__(self, op: Operator): super().__init__(ints_to_scoping._spec().outputs, op) - self._scoping = Output(ints_to_scoping._spec().output_pin(0), 0, op) + self._scoping: Output[Scoping] = Output( + ints_to_scoping._spec().output_pin(0), 0, op + ) self._outputs.append(self._scoping) @property - def scoping(self) -> Output: + def scoping(self) -> Output[Scoping]: r"""Allows to get scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/make_for_each_range.py b/src/ansys/dpf/core/operators/utility/make_for_each_range.py index 32554852846..d1622f0ee49 100644 --- a/src/ansys/dpf/core/operators/utility/make_for_each_range.py +++ b/src/ansys/dpf/core/operators/utility/make_for_each_range.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.dpf_operator import Operator + class make_for_each_range(Operator): r"""Generate a range that can be consumed by the for_each operator @@ -252,29 +256,33 @@ class InputsMakeForEachRange(_Inputs): def __init__(self, op: Operator): super().__init__(make_for_each_range._spec().inputs, op) - self._try_generate_iterable = Input( + self._try_generate_iterable: Input[bool] = Input( make_for_each_range._spec().input_pin(-1), -1, op, -1 ) self._inputs.append(self._try_generate_iterable) - self._iterable = Input(make_for_each_range._spec().input_pin(0), 0, op, -1) + self._iterable: Input = Input( + make_for_each_range._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._iterable) - self._operator_to_iterate = Input( + self._operator_to_iterate: Input[Operator] = Input( make_for_each_range._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._operator_to_iterate) - self._pin_index = Input(make_for_each_range._spec().input_pin(2), 2, op, -1) + self._pin_index: Input[int] = Input( + make_for_each_range._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._pin_index) - self._valueA = Input(make_for_each_range._spec().input_pin(3), 3, op, -1) + self._valueA: Input = Input(make_for_each_range._spec().input_pin(3), 3, op, -1) self._inputs.append(self._valueA) - self._valueB = Input(make_for_each_range._spec().input_pin(4), 4, op, -1) + self._valueB: Input = Input(make_for_each_range._spec().input_pin(4), 4, op, -1) self._inputs.append(self._valueB) - self._valueC1 = Input(make_for_each_range._spec().input_pin(5), 5, op, 0) + self._valueC1: Input = Input(make_for_each_range._spec().input_pin(5), 5, op, 0) self._inputs.append(self._valueC1) - self._valueC2 = Input(make_for_each_range._spec().input_pin(6), 6, op, 1) + self._valueC2: Input = Input(make_for_each_range._spec().input_pin(6), 6, op, 1) self._inputs.append(self._valueC2) @property - def try_generate_iterable(self) -> Input: + def try_generate_iterable(self) -> Input[bool]: r"""Allows to connect try_generate_iterable input to the operator. 
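ints_to_scoping above is typed with the union Input[int | Scoping] on its first pin and with Output[Scoping]. A hedged sketch that builds a ranged scoping from a single int plus the upper_bound pin; a server is required, and note that the operator also accepts a plain vector of ints at runtime, which the int | Scoping annotation does not advertise:

from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

op = ops.utility.ints_to_scoping()
op.inputs.int_or_vector_int.connect(1)               # Input[int | Scoping]
op.inputs.upper_bound.connect(100)                   # Input[int | Scoping], range from 1 to 100 per the pin docstring
op.inputs.location.connect(dpf.locations.elemental)  # Input[str]

scoping = op.outputs.scoping()                       # inferred as Scoping
print(len(scoping.ids))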
if true, already iterable values connected in pin 3 like vectors, Scoping, TimefreqSupport, Containers and DataSources are split to iterate on it (default is true) @@ -316,7 +324,7 @@ def iterable(self) -> Input: return self._iterable @property - def operator_to_iterate(self) -> Input: + def operator_to_iterate(self) -> Input[Operator]: r"""Allows to connect operator_to_iterate input to the operator. Operator that must be reconnected with the range values. @@ -337,7 +345,7 @@ def operator_to_iterate(self) -> Input: return self._operator_to_iterate @property - def pin_index(self) -> Input: + def pin_index(self) -> Input[int]: r"""Allows to connect pin_index input to the operator. Returns @@ -446,7 +454,7 @@ class OutputsMakeForEachRange(_Outputs): def __init__(self, op: Operator): super().__init__(make_for_each_range._spec().outputs, op) - self._output = Output(make_for_each_range._spec().output_pin(0), 0, op) + self._output: Output = Output(make_for_each_range._spec().output_pin(0), 0, op) self._outputs.append(self._output) @property diff --git a/src/ansys/dpf/core/operators/utility/make_label_space.py b/src/ansys/dpf/core/operators/utility/make_label_space.py index 3368237641d..88f2173fbbb 100644 --- a/src/ansys/dpf/core/operators/utility/make_label_space.py +++ b/src/ansys/dpf/core/operators/utility/make_label_space.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scopings_container import ScopingsContainer + class make_label_space(Operator): r"""Assemble strings and integers to make a label space. @@ -195,17 +200,25 @@ class InputsMakeLabelSpace(_Inputs): def __init__(self, op: Operator): super().__init__(make_label_space._spec().inputs, op) - self._base_label = Input(make_label_space._spec().input_pin(0), 0, op, -1) + self._base_label: Input[dict | FieldsContainer | ScopingsContainer] = Input( + make_label_space._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._base_label) - self._label_name = Input(make_label_space._spec().input_pin(1), 1, op, -1) + self._label_name: Input[str] = Input( + make_label_space._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._label_name) - self._label_value1 = Input(make_label_space._spec().input_pin(2), 2, op, 0) + self._label_value1: Input[int] = Input( + make_label_space._spec().input_pin(2), 2, op, 0 + ) self._inputs.append(self._label_value1) - self._label_value2 = Input(make_label_space._spec().input_pin(3), 3, op, 1) + self._label_value2: Input[int] = Input( + make_label_space._spec().input_pin(3), 3, op, 1 + ) self._inputs.append(self._label_value2) @property - def base_label(self) -> Input: + def base_label(self) -> Input[dict | FieldsContainer | ScopingsContainer]: r"""Allows to connect base_label input to the operator. Used as a base label (extracted from Fields/Scoping Container, or directly from Label Space) that is concatenated with provided values. @@ -226,7 +239,7 @@ def base_label(self) -> Input: return self._base_label @property - def label_name(self) -> Input: + def label_name(self) -> Input[str]: r"""Allows to connect label_name input to the operator. 
Returns @@ -245,7 +258,7 @@ def label_name(self) -> Input: return self._label_name @property - def label_value1(self) -> Input: + def label_value1(self) -> Input[int]: r"""Allows to connect label_value1 input to the operator. Returns @@ -264,7 +277,7 @@ def label_value1(self) -> Input: return self._label_value1 @property - def label_value2(self) -> Input: + def label_value2(self) -> Input[int]: r"""Allows to connect label_value2 input to the operator. Returns @@ -297,11 +310,13 @@ class OutputsMakeLabelSpace(_Outputs): def __init__(self, op: Operator): super().__init__(make_label_space._spec().outputs, op) - self._label = Output(make_label_space._spec().output_pin(0), 0, op) + self._label: Output[dict] = Output( + make_label_space._spec().output_pin(0), 0, op + ) self._outputs.append(self._label) @property - def label(self) -> Output: + def label(self) -> Output[dict]: r"""Allows to get label output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/make_overall.py b/src/ansys/dpf/core/operators/utility/make_overall.py index abaab362aa8..68090b65b33 100644 --- a/src/ansys/dpf/core/operators/utility/make_overall.py +++ b/src/ansys/dpf/core/operators/utility/make_overall.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class make_overall(Operator): r"""Extracts a value from a field and makes a new field containing only this @@ -156,13 +160,13 @@ class InputsMakeOverall(_Inputs): def __init__(self, op: Operator): super().__init__(make_overall._spec().inputs, op) - self._field = Input(make_overall._spec().input_pin(0), 0, op, -1) + self._field: Input[Field] = Input(make_overall._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._id = Input(make_overall._spec().input_pin(1), 1, op, -1) + self._id: Input[int] = Input(make_overall._spec().input_pin(1), 1, op, -1) self._inputs.append(self._id) @property - def field(self) -> Input: + def field(self) -> Input[Field]: r"""Allows to connect field input to the operator. Returns @@ -181,7 +185,7 @@ def field(self) -> Input: return self._field @property - def id(self) -> Input: + def id(self) -> Input[int]: r"""Allows to connect id input to the operator. 
Returns @@ -214,11 +218,11 @@ class OutputsMakeOverall(_Outputs): def __init__(self, op: Operator): super().__init__(make_overall._spec().outputs, op) - self._field = Output(make_overall._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(make_overall._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/make_producer_consumer_for_each_iterator.py b/src/ansys/dpf/core/operators/utility/make_producer_consumer_for_each_iterator.py index 813b55db086..2f8539a01dc 100644 --- a/src/ansys/dpf/core/operators/utility/make_producer_consumer_for_each_iterator.py +++ b/src/ansys/dpf/core/operators/utility/make_producer_consumer_for_each_iterator.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.dpf_operator import Operator + class make_producer_consumer_for_each_iterator(Operator): r"""Generates an iterator that can be consumed by the for_each operator.The @@ -385,88 +389,88 @@ class InputsMakeProducerConsumerForEachIterator(_Inputs): def __init__(self, op: Operator): super().__init__(make_producer_consumer_for_each_iterator._spec().inputs, op) - self._try_generate_iterable = Input( + self._try_generate_iterable: Input[bool] = Input( make_producer_consumer_for_each_iterator._spec().input_pin(-1), -1, op, -1 ) self._inputs.append(self._try_generate_iterable) - self._iterable = Input( + self._iterable: Input = Input( make_producer_consumer_for_each_iterator._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._iterable) - self._operator_to_iterate = Input( + self._operator_to_iterate: Input[Operator] = Input( make_producer_consumer_for_each_iterator._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._operator_to_iterate) - self._pin_index = Input( + self._pin_index: Input[int] = Input( make_producer_consumer_for_each_iterator._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._pin_index) - self._valueA = Input( + self._valueA: Input = Input( make_producer_consumer_for_each_iterator._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._valueA) - self._valueB = Input( + self._valueB: Input = Input( make_producer_consumer_for_each_iterator._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._valueB) - self._valueC1 = Input( + self._valueC1: Input = Input( make_producer_consumer_for_each_iterator._spec().input_pin(5), 5, op, 0 ) self._inputs.append(self._valueC1) - self._valueC2 = Input( + self._valueC2: Input = Input( make_producer_consumer_for_each_iterator._spec().input_pin(6), 6, op, 1 ) self._inputs.append(self._valueC2) - self._producer_op11 = Input( + self._producer_op11: Input[Operator] = Input( make_producer_consumer_for_each_iterator._spec().input_pin(1000), 1000, op, 0, ) self._inputs.append(self._producer_op11) - self._producer_op12 = Input( + self._producer_op12: Input[Operator] = Input( make_producer_consumer_for_each_iterator._spec().input_pin(1001), 1001, op, 1, ) self._inputs.append(self._producer_op12) - self._output_pin_of_producer_op11 = Input( + self._output_pin_of_producer_op11: Input[int] = Input( make_producer_consumer_for_each_iterator._spec().input_pin(1001), 1001, op, 0, ) 
self._inputs.append(self._output_pin_of_producer_op11) - self._output_pin_of_producer_op12 = Input( + self._output_pin_of_producer_op12: Input[int] = Input( make_producer_consumer_for_each_iterator._spec().input_pin(1002), 1002, op, 1, ) self._inputs.append(self._output_pin_of_producer_op12) - self._input_pin_of_consumer_op11 = Input( + self._input_pin_of_consumer_op11: Input[int] = Input( make_producer_consumer_for_each_iterator._spec().input_pin(1002), 1002, op, 0, ) self._inputs.append(self._input_pin_of_consumer_op11) - self._input_pin_of_consumer_op12 = Input( + self._input_pin_of_consumer_op12: Input[int] = Input( make_producer_consumer_for_each_iterator._spec().input_pin(1003), 1003, op, 1, ) self._inputs.append(self._input_pin_of_consumer_op12) - self._consumer_op11 = Input( + self._consumer_op11: Input[Operator] = Input( make_producer_consumer_for_each_iterator._spec().input_pin(1003), 1003, op, 0, ) self._inputs.append(self._consumer_op11) - self._consumer_op12 = Input( + self._consumer_op12: Input[Operator] = Input( make_producer_consumer_for_each_iterator._spec().input_pin(1004), 1004, op, @@ -475,7 +479,7 @@ def __init__(self, op: Operator): self._inputs.append(self._consumer_op12) @property - def try_generate_iterable(self) -> Input: + def try_generate_iterable(self) -> Input[bool]: r"""Allows to connect try_generate_iterable input to the operator. if true, already iterable values connected in pin 3 like vectors, Scoping, TimefreqSupport, Containers and DataSources are split to iterate on it (default is true) @@ -517,7 +521,7 @@ def iterable(self) -> Input: return self._iterable @property - def operator_to_iterate(self) -> Input: + def operator_to_iterate(self) -> Input[Operator]: r"""Allows to connect operator_to_iterate input to the operator. Operator that must be reconnected with the range values. @@ -538,7 +542,7 @@ def operator_to_iterate(self) -> Input: return self._operator_to_iterate @property - def pin_index(self) -> Input: + def pin_index(self) -> Input[int]: r"""Allows to connect pin_index input to the operator. Returns @@ -633,7 +637,7 @@ def valueC2(self) -> Input: return self._valueC2 @property - def producer_op11(self) -> Input: + def producer_op11(self) -> Input[Operator]: r"""Allows to connect producer_op11 input to the operator. Returns @@ -652,7 +656,7 @@ def producer_op11(self) -> Input: return self._producer_op11 @property - def producer_op12(self) -> Input: + def producer_op12(self) -> Input[Operator]: r"""Allows to connect producer_op12 input to the operator. Returns @@ -671,7 +675,7 @@ def producer_op12(self) -> Input: return self._producer_op12 @property - def output_pin_of_producer_op11(self) -> Input: + def output_pin_of_producer_op11(self) -> Input[int]: r"""Allows to connect output_pin_of_producer_op11 input to the operator. Returns @@ -690,7 +694,7 @@ def output_pin_of_producer_op11(self) -> Input: return self._output_pin_of_producer_op11 @property - def output_pin_of_producer_op12(self) -> Input: + def output_pin_of_producer_op12(self) -> Input[int]: r"""Allows to connect output_pin_of_producer_op12 input to the operator. Returns @@ -709,7 +713,7 @@ def output_pin_of_producer_op12(self) -> Input: return self._output_pin_of_producer_op12 @property - def input_pin_of_consumer_op11(self) -> Input: + def input_pin_of_consumer_op11(self) -> Input[int]: r"""Allows to connect input_pin_of_consumer_op11 input to the operator. 
Returns @@ -728,7 +732,7 @@ def input_pin_of_consumer_op11(self) -> Input: return self._input_pin_of_consumer_op11 @property - def input_pin_of_consumer_op12(self) -> Input: + def input_pin_of_consumer_op12(self) -> Input[int]: r"""Allows to connect input_pin_of_consumer_op12 input to the operator. Returns @@ -747,7 +751,7 @@ def input_pin_of_consumer_op12(self) -> Input: return self._input_pin_of_consumer_op12 @property - def consumer_op11(self) -> Input: + def consumer_op11(self) -> Input[Operator]: r"""Allows to connect consumer_op11 input to the operator. Returns @@ -766,7 +770,7 @@ def consumer_op11(self) -> Input: return self._consumer_op11 @property - def consumer_op12(self) -> Input: + def consumer_op12(self) -> Input[Operator]: r"""Allows to connect consumer_op12 input to the operator. Returns @@ -799,7 +803,7 @@ class OutputsMakeProducerConsumerForEachIterator(_Outputs): def __init__(self, op: Operator): super().__init__(make_producer_consumer_for_each_iterator._spec().outputs, op) - self._iterator = Output( + self._iterator: Output = Output( make_producer_consumer_for_each_iterator._spec().output_pin(0), 0, op ) self._outputs.append(self._iterator) diff --git a/src/ansys/dpf/core/operators/utility/merge_any.py b/src/ansys/dpf/core/operators/utility/merge_any.py index c1caea268fa..84c05fc33f9 100644 --- a/src/ansys/dpf/core/operators/utility/merge_any.py +++ b/src/ansys/dpf/core/operators/utility/merge_any.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.any import Any + class merge_any(Operator): r"""Merges a list of objects having the same data types. Once the data type @@ -160,13 +164,13 @@ class InputsMergeAny(_Inputs): def __init__(self, op: Operator): super().__init__(merge_any._spec().inputs, op) - self._any1 = Input(merge_any._spec().input_pin(0), 0, op, 0) + self._any1: Input[Any] = Input(merge_any._spec().input_pin(0), 0, op, 0) self._inputs.append(self._any1) - self._any2 = Input(merge_any._spec().input_pin(1), 1, op, 1) + self._any2: Input[Any] = Input(merge_any._spec().input_pin(1), 1, op, 1) self._inputs.append(self._any2) @property - def any1(self) -> Input: + def any1(self) -> Input[Any]: r"""Allows to connect any1 input to the operator. Either a vector of objects (sharing the same data types) or objects from pin 0 to ... to merge. Supported types rely on existing type specific merge operators. @@ -187,7 +191,7 @@ def any1(self) -> Input: return self._any1 @property - def any2(self) -> Input: + def any2(self) -> Input[Any]: r"""Allows to connect any2 input to the operator. Either a vector of objects (sharing the same data types) or objects from pin 0 to ... to merge. Supported types rely on existing type specific merge operators. 
@@ -222,11 +226,11 @@ class OutputsMergeAny(_Outputs): def __init__(self, op: Operator): super().__init__(merge_any._spec().outputs, op) - self._any = Output(merge_any._spec().output_pin(0), 0, op) + self._any: Output[Any] = Output(merge_any._spec().output_pin(0), 0, op) self._outputs.append(self._any) @property - def any(self) -> Output: + def any(self) -> Output[Any]: r"""Allows to get any output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_collections.py b/src/ansys/dpf/core/operators/utility/merge_collections.py index a1fa2789dc8..e945847557d 100644 --- a/src/ansys/dpf/core/operators/utility/merge_collections.py +++ b/src/ansys/dpf/core/operators/utility/merge_collections.py @@ -156,9 +156,13 @@ class InputsMergeCollections(_Inputs): def __init__(self, op: Operator): super().__init__(merge_collections._spec().inputs, op) - self._collections1 = Input(merge_collections._spec().input_pin(0), 0, op, 0) + self._collections1: Input = Input( + merge_collections._spec().input_pin(0), 0, op, 0 + ) self._inputs.append(self._collections1) - self._collections2 = Input(merge_collections._spec().input_pin(1), 1, op, 1) + self._collections2: Input = Input( + merge_collections._spec().input_pin(1), 1, op, 1 + ) self._inputs.append(self._collections2) @property @@ -218,7 +222,7 @@ class OutputsMergeCollections(_Outputs): def __init__(self, op: Operator): super().__init__(merge_collections._spec().outputs, op) - self._merged_collections = Output( + self._merged_collections: Output = Output( merge_collections._spec().output_pin(0), 0, op ) self._outputs.append(self._merged_collections) diff --git a/src/ansys/dpf/core/operators/utility/merge_data_tree.py b/src/ansys/dpf/core/operators/utility/merge_data_tree.py index c0876412e27..95afc552d0e 100644 --- a/src/ansys/dpf/core/operators/utility/merge_data_tree.py +++ b/src/ansys/dpf/core/operators/utility/merge_data_tree.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.any import Any + from ansys.dpf.core.data_tree import DataTree + class merge_data_tree(Operator): r"""Merges a list of data trees. Attributes names shouldn’t be shared @@ -158,13 +163,17 @@ class InputsMergeDataTree(_Inputs): def __init__(self, op: Operator): super().__init__(merge_data_tree._spec().inputs, op) - self._data_tree1 = Input(merge_data_tree._spec().input_pin(0), 0, op, 0) + self._data_tree1: Input[DataTree] = Input( + merge_data_tree._spec().input_pin(0), 0, op, 0 + ) self._inputs.append(self._data_tree1) - self._data_tree2 = Input(merge_data_tree._spec().input_pin(1), 1, op, 1) + self._data_tree2: Input[DataTree] = Input( + merge_data_tree._spec().input_pin(1), 1, op, 1 + ) self._inputs.append(self._data_tree2) @property - def data_tree1(self) -> Input: + def data_tree1(self) -> Input[DataTree]: r"""Allows to connect data_tree1 input to the operator. Either a vector of data trees or data trees from pin 0 to ... to merge. @@ -185,7 +194,7 @@ def data_tree1(self) -> Input: return self._data_tree1 @property - def data_tree2(self) -> Input: + def data_tree2(self) -> Input[DataTree]: r"""Allows to connect data_tree2 input to the operator. Either a vector of data trees or data trees from pin 0 to ... to merge. 
@@ -220,11 +229,11 @@ class OutputsMergeDataTree(_Outputs): def __init__(self, op: Operator): super().__init__(merge_data_tree._spec().outputs, op) - self._any = Output(merge_data_tree._spec().output_pin(0), 0, op) + self._any: Output[Any] = Output(merge_data_tree._spec().output_pin(0), 0, op) self._outputs.append(self._any) @property - def any(self) -> Output: + def any(self) -> Output[Any]: r"""Allows to get any output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_fields.py b/src/ansys/dpf/core/operators/utility/merge_fields.py index c710a4c5656..f4601dc1275 100644 --- a/src/ansys/dpf/core/operators/utility/merge_fields.py +++ b/src/ansys/dpf/core/operators/utility/merge_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class merge_fields(Operator): r"""Assembles a set of fields into a unique one. @@ -194,17 +199,25 @@ class InputsMergeFields(_Inputs): def __init__(self, op: Operator): super().__init__(merge_fields._spec().inputs, op) - self._sum_merge = Input(merge_fields._spec().input_pin(-2), -2, op, -1) + self._sum_merge: Input[bool] = Input( + merge_fields._spec().input_pin(-2), -2, op, -1 + ) self._inputs.append(self._sum_merge) - self._merged_support = Input(merge_fields._spec().input_pin(-1), -1, op, -1) + self._merged_support: Input = Input( + merge_fields._spec().input_pin(-1), -1, op, -1 + ) self._inputs.append(self._merged_support) - self._fields1 = Input(merge_fields._spec().input_pin(0), 0, op, 0) + self._fields1: Input[Field | FieldsContainer] = Input( + merge_fields._spec().input_pin(0), 0, op, 0 + ) self._inputs.append(self._fields1) - self._fields2 = Input(merge_fields._spec().input_pin(1), 1, op, 1) + self._fields2: Input[Field | FieldsContainer] = Input( + merge_fields._spec().input_pin(1), 1, op, 1 + ) self._inputs.append(self._fields2) @property - def sum_merge(self) -> Input: + def sum_merge(self) -> Input[bool]: r"""Allows to connect sum_merge input to the operator. Default is false. If true, redundant quantities are summed instead of being ignored. @@ -246,7 +259,7 @@ def merged_support(self) -> Input: return self._merged_support @property - def fields1(self) -> Input: + def fields1(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fields1 input to the operator. Either a fields container, a vector of fields to merge, or fields from pin 0 to ... @@ -267,7 +280,7 @@ def fields1(self) -> Input: return self._fields1 @property - def fields2(self) -> Input: + def fields2(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fields2 input to the operator. Either a fields container, a vector of fields to merge, or fields from pin 0 to ... 
@@ -302,11 +315,13 @@ class OutputsMergeFields(_Outputs): def __init__(self, op: Operator): super().__init__(merge_fields._spec().outputs, op) - self._merged_field = Output(merge_fields._spec().output_pin(0), 0, op) + self._merged_field: Output[Field] = Output( + merge_fields._spec().output_pin(0), 0, op + ) self._outputs.append(self._merged_field) @property - def merged_field(self) -> Output: + def merged_field(self) -> Output[Field]: r"""Allows to get merged_field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_fields_by_label.py b/src/ansys/dpf/core/operators/utility/merge_fields_by_label.py index f6ce973dedc..c1f2c42f43e 100644 --- a/src/ansys/dpf/core/operators/utility/merge_fields_by_label.py +++ b/src/ansys/dpf/core/operators/utility/merge_fields_by_label.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class merge_fields_by_label(Operator): r"""Merges the fields of a fields container that share the same label value. @@ -205,21 +209,25 @@ class InputsMergeFieldsByLabel(_Inputs): def __init__(self, op: Operator): super().__init__(merge_fields_by_label._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( merge_fields_by_label._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._label = Input(merge_fields_by_label._spec().input_pin(1), 1, op, -1) + self._label: Input[str] = Input( + merge_fields_by_label._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._label) - self._merged_field_support = Input( + self._merged_field_support: Input = Input( merge_fields_by_label._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._merged_field_support) - self._sum_merge = Input(merge_fields_by_label._spec().input_pin(3), 3, op, -1) + self._sum_merge: Input[bool] = Input( + merge_fields_by_label._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._sum_merge) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -238,7 +246,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def label(self) -> Input: + def label(self) -> Input[str]: r"""Allows to connect label input to the operator. Label identifier that should be merged. @@ -280,7 +288,7 @@ def merged_field_support(self) -> Input: return self._merged_field_support @property - def sum_merge(self) -> Input: + def sum_merge(self) -> Input[bool]: r"""Allows to connect sum_merge input to the operator. Default is false. If true, redundant quantities are summed instead of being ignored. 
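A minimal sketch (not from the patch) of the Field | FieldsContainer union and the bool pin typed above for merge_fields; it assumes the bundled static example file and reuses one displacement field twice just to have two inputs.

# Hedged sketch: merge two fields with sum_merge enabled.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

field = dpf.Model(examples.find_static_rst()).results.displacement().eval()[0]

op = ops.utility.merge_fields()
op.inputs.sum_merge.connect(True)          # Input[bool]
op.inputs.fields1.connect(field)           # Input[Field | FieldsContainer]
op.inputs.fields2.connect(field)
merged_field = op.outputs.merged_field()   # Output[Field]

With sum_merge left at its default of false, overlapping entities would be kept once instead of summed.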
@@ -316,17 +324,17 @@ class OutputsMergeFieldsByLabel(_Outputs): def __init__(self, op: Operator): super().__init__(merge_fields_by_label._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( merge_fields_by_label._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) - self._merged_field_support = Output( + self._merged_field_support: Output = Output( merge_fields_by_label._spec().output_pin(1), 1, op ) self._outputs.append(self._merged_field_support) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_fields_containers.py b/src/ansys/dpf/core/operators/utility/merge_fields_containers.py index 5e832a11a26..1a3108176b2 100644 --- a/src/ansys/dpf/core/operators/utility/merge_fields_containers.py +++ b/src/ansys/dpf/core/operators/utility/merge_fields_containers.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class merge_fields_containers(Operator): r"""Assembles a set of fields containers into a unique one. @@ -233,33 +237,33 @@ class InputsMergeFieldsContainers(_Inputs): def __init__(self, op: Operator): super().__init__(merge_fields_containers._spec().inputs, op) - self._should_merge_named_selections = Input( + self._should_merge_named_selections: Input[bool] = Input( merge_fields_containers._spec().input_pin(-200), -200, op, -1 ) self._inputs.append(self._should_merge_named_selections) - self._sum_merge = Input( + self._sum_merge: Input[bool] = Input( merge_fields_containers._spec().input_pin(-3), -3, op, -1 ) self._inputs.append(self._sum_merge) - self._merged_fields_support = Input( + self._merged_fields_support: Input = Input( merge_fields_containers._spec().input_pin(-2), -2, op, -1 ) self._inputs.append(self._merged_fields_support) - self._merged_fields_containers_support = Input( + self._merged_fields_containers_support: Input = Input( merge_fields_containers._spec().input_pin(-1), -1, op, -1 ) self._inputs.append(self._merged_fields_containers_support) - self._fields_containers1 = Input( + self._fields_containers1: Input[FieldsContainer] = Input( merge_fields_containers._spec().input_pin(0), 0, op, 0 ) self._inputs.append(self._fields_containers1) - self._fields_containers2 = Input( + self._fields_containers2: Input[FieldsContainer] = Input( merge_fields_containers._spec().input_pin(1), 1, op, 1 ) self._inputs.append(self._fields_containers2) @property - def should_merge_named_selections(self) -> Input: + def should_merge_named_selections(self) -> Input[bool]: r"""Allows to connect should_merge_named_selections input to the operator. For some result files (such as RST), the scoping on names selection is duplicated through all the distributed files.If this pin is false, the merging process is skipped. If it is true, this scoping is merged. Default is true. @@ -280,7 +284,7 @@ def should_merge_named_selections(self) -> Input: return self._should_merge_named_selections @property - def sum_merge(self) -> Input: + def sum_merge(self) -> Input[bool]: r"""Allows to connect sum_merge input to the operator. Default is false. 
If true, redundant quantities are summed instead of being ignored. @@ -343,7 +347,7 @@ def merged_fields_containers_support(self) -> Input: return self._merged_fields_containers_support @property - def fields_containers1(self) -> Input: + def fields_containers1(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containers1 input to the operator. A vector of fields containers to merge or fields containers from pin 0 to ... @@ -364,7 +368,7 @@ def fields_containers1(self) -> Input: return self._fields_containers1 @property - def fields_containers2(self) -> Input: + def fields_containers2(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containers2 input to the operator. A vector of fields containers to merge or fields containers from pin 0 to ... @@ -399,13 +403,13 @@ class OutputsMergeFieldsContainers(_Outputs): def __init__(self, op: Operator): super().__init__(merge_fields_containers._spec().outputs, op) - self._merged_fields_container = Output( + self._merged_fields_container: Output[FieldsContainer] = Output( merge_fields_containers._spec().output_pin(0), 0, op ) self._outputs.append(self._merged_fields_container) @property - def merged_fields_container(self) -> Output: + def merged_fields_container(self) -> Output[FieldsContainer]: r"""Allows to get merged_fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_generic_data_container.py b/src/ansys/dpf/core/operators/utility/merge_generic_data_container.py index 10c07d56604..1da2536f9b5 100644 --- a/src/ansys/dpf/core/operators/utility/merge_generic_data_container.py +++ b/src/ansys/dpf/core/operators/utility/merge_generic_data_container.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.generic_data_container import GenericDataContainer + class merge_generic_data_container(Operator): r"""Merges a list of generic data container. For each data entry, the merge @@ -170,17 +174,17 @@ class InputsMergeGenericDataContainer(_Inputs): def __init__(self, op: Operator): super().__init__(merge_generic_data_container._spec().inputs, op) - self._generic_data_container1 = Input( + self._generic_data_container1: Input[GenericDataContainer] = Input( merge_generic_data_container._spec().input_pin(0), 0, op, 0 ) self._inputs.append(self._generic_data_container1) - self._generic_data_container2 = Input( + self._generic_data_container2: Input[GenericDataContainer] = Input( merge_generic_data_container._spec().input_pin(1), 1, op, 1 ) self._inputs.append(self._generic_data_container2) @property - def generic_data_container1(self) -> Input: + def generic_data_container1(self) -> Input[GenericDataContainer]: r"""Allows to connect generic_data_container1 input to the operator. Either a vector of generic data containers (sharing the same data types) or generic data containers from pin 0 to ... to merge. Supported types rely on existing type specific merge operators. @@ -201,7 +205,7 @@ def generic_data_container1(self) -> Input: return self._generic_data_container1 @property - def generic_data_container2(self) -> Input: + def generic_data_container2(self) -> Input[GenericDataContainer]: r"""Allows to connect generic_data_container2 input to the operator. 
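The FieldsContainer and bool annotations on merge_fields_containers above are what let a static checker validate connections; a sketch of that benefit (illustrative only, assuming mypy or a similar checker and the bundled example file).

# Hedged sketch: typed pins of merge_fields_containers.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

fc = dpf.Model(examples.find_static_rst()).results.displacement().eval()

op = ops.utility.merge_fields_containers()
op.inputs.should_merge_named_selections.connect(True)  # Input[bool]
op.inputs.fields_containers1.connect(fc)               # Input[FieldsContainer]
op.inputs.fields_containers2.connect(fc)
# op.inputs.fields_containers1.connect(dpf.Scoping())  # a checker should now flag this mismatch
merged = op.outputs.merged_fields_container()          # Output[FieldsContainer]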
Either a vector of generic data containers (sharing the same data types) or generic data containers from pin 0 to ... to merge. Supported types rely on existing type specific merge operators. @@ -236,13 +240,13 @@ class OutputsMergeGenericDataContainer(_Outputs): def __init__(self, op: Operator): super().__init__(merge_generic_data_container._spec().outputs, op) - self._generic_data_container = Output( + self._generic_data_container: Output[GenericDataContainer] = Output( merge_generic_data_container._spec().output_pin(0), 0, op ) self._outputs.append(self._generic_data_container) @property - def generic_data_container(self) -> Output: + def generic_data_container(self) -> Output[GenericDataContainer]: r"""Allows to get generic_data_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_materials.py b/src/ansys/dpf/core/operators/utility/merge_materials.py index 4f45add1b4b..3fae66c5315 100644 --- a/src/ansys/dpf/core/operators/utility/merge_materials.py +++ b/src/ansys/dpf/core/operators/utility/merge_materials.py @@ -156,9 +156,9 @@ class InputsMergeMaterials(_Inputs): def __init__(self, op: Operator): super().__init__(merge_materials._spec().inputs, op) - self._materials1 = Input(merge_materials._spec().input_pin(0), 0, op, 0) + self._materials1: Input = Input(merge_materials._spec().input_pin(0), 0, op, 0) self._inputs.append(self._materials1) - self._materials2 = Input(merge_materials._spec().input_pin(1), 1, op, 1) + self._materials2: Input = Input(merge_materials._spec().input_pin(1), 1, op, 1) self._inputs.append(self._materials2) @property @@ -218,7 +218,9 @@ class OutputsMergeMaterials(_Outputs): def __init__(self, op: Operator): super().__init__(merge_materials._spec().outputs, op) - self._merged_materials = Output(merge_materials._spec().output_pin(0), 0, op) + self._merged_materials: Output = Output( + merge_materials._spec().output_pin(0), 0, op + ) self._outputs.append(self._merged_materials) @property diff --git a/src/ansys/dpf/core/operators/utility/merge_meshes.py b/src/ansys/dpf/core/operators/utility/merge_meshes.py index 7512ad00df2..51ca65df89b 100644 --- a/src/ansys/dpf/core/operators/utility/merge_meshes.py +++ b/src/ansys/dpf/core/operators/utility/merge_meshes.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + class merge_meshes(Operator): r"""Take a set of meshes and assemble them in a unique one @@ -244,29 +249,37 @@ class InputsMergeMeshes(_Inputs): def __init__(self, op: Operator): super().__init__(merge_meshes._spec().inputs, op) - self._naive_merge_elements = Input( + self._naive_merge_elements: Input[bool] = Input( merge_meshes._spec().input_pin(-201), -201, op, -1 ) self._inputs.append(self._naive_merge_elements) - self._should_merge_named_selections = Input( + self._should_merge_named_selections: Input[bool] = Input( merge_meshes._spec().input_pin(-200), -200, op, -1 ) self._inputs.append(self._should_merge_named_selections) - self._meshes1 = Input(merge_meshes._spec().input_pin(0), 0, op, 0) + self._meshes1: Input[MeshedRegion | MeshesContainer] = Input( + merge_meshes._spec().input_pin(0), 0, op, 0 + ) self._inputs.append(self._meshes1) - 
self._meshes2 = Input(merge_meshes._spec().input_pin(1), 1, op, 1) + self._meshes2: Input[MeshedRegion | MeshesContainer] = Input( + merge_meshes._spec().input_pin(1), 1, op, 1 + ) self._inputs.append(self._meshes2) - self._merge_method = Input(merge_meshes._spec().input_pin(101), 101, op, -1) + self._merge_method: Input[int] = Input( + merge_meshes._spec().input_pin(101), 101, op, -1 + ) self._inputs.append(self._merge_method) - self._box_size = Input(merge_meshes._spec().input_pin(102), 102, op, -1) + self._box_size: Input[float] = Input( + merge_meshes._spec().input_pin(102), 102, op, -1 + ) self._inputs.append(self._box_size) - self._remove_duplicate_elements = Input( + self._remove_duplicate_elements: Input[int] = Input( merge_meshes._spec().input_pin(103), 103, op, -1 ) self._inputs.append(self._remove_duplicate_elements) @property - def naive_merge_elements(self) -> Input: + def naive_merge_elements(self) -> Input[bool]: r"""Allows to connect naive_merge_elements input to the operator. If true, merge the elemental Property Fields of the input meshes assuming that there is no repetition in their scoping ids. Default is false. @@ -287,7 +300,7 @@ def naive_merge_elements(self) -> Input: return self._naive_merge_elements @property - def should_merge_named_selections(self) -> Input: + def should_merge_named_selections(self) -> Input[bool]: r"""Allows to connect should_merge_named_selections input to the operator. For certain types of files (such as RST), scoping from names selection does not need to be merged.If this pin is true, the merge occurs. If this pin is false, the merge does not occur. Default is true. @@ -308,7 +321,7 @@ def should_merge_named_selections(self) -> Input: return self._should_merge_named_selections @property - def meshes1(self) -> Input: + def meshes1(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect meshes1 input to the operator. A vector of meshed region to merge or meshed region from pin 0 to ... @@ -329,7 +342,7 @@ def meshes1(self) -> Input: return self._meshes1 @property - def meshes2(self) -> Input: + def meshes2(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect meshes2 input to the operator. A vector of meshed region to merge or meshed region from pin 0 to ... @@ -350,7 +363,7 @@ def meshes2(self) -> Input: return self._meshes2 @property - def merge_method(self) -> Input: + def merge_method(self) -> Input[int]: r"""Allows to connect merge_method input to the operator. 0: merge by distance, 1: merge by node id (default) @@ -371,7 +384,7 @@ def merge_method(self) -> Input: return self._merge_method @property - def box_size(self) -> Input: + def box_size(self) -> Input[float]: r"""Allows to connect box_size input to the operator. Box size used when merging by distance. Default value is 1e-12. @@ -392,7 +405,7 @@ def box_size(self) -> Input: return self._box_size @property - def remove_duplicate_elements(self) -> Input: + def remove_duplicate_elements(self) -> Input[int]: r"""Allows to connect remove_duplicate_elements input to the operator. 
0: keep duplicate elements (default), 1: remove duplicate elements @@ -427,11 +440,13 @@ class OutputsMergeMeshes(_Outputs): def __init__(self, op: Operator): super().__init__(merge_meshes._spec().outputs, op) - self._merges_mesh = Output(merge_meshes._spec().output_pin(0), 0, op) + self._merges_mesh: Output[MeshedRegion] = Output( + merge_meshes._spec().output_pin(0), 0, op + ) self._outputs.append(self._merges_mesh) @property - def merges_mesh(self) -> Output: + def merges_mesh(self) -> Output[MeshedRegion]: r"""Allows to get merges_mesh output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_meshes_containers.py b/src/ansys/dpf/core/operators/utility/merge_meshes_containers.py index a1ce4358899..bec59217124 100644 --- a/src/ansys/dpf/core/operators/utility/merge_meshes_containers.py +++ b/src/ansys/dpf/core/operators/utility/merge_meshes_containers.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshes_container import MeshesContainer + class merge_meshes_containers(Operator): r"""Assembles a set of meshes containers into a unique one. @@ -158,17 +162,17 @@ class InputsMergeMeshesContainers(_Inputs): def __init__(self, op: Operator): super().__init__(merge_meshes_containers._spec().inputs, op) - self._meshes_containers1 = Input( + self._meshes_containers1: Input[MeshesContainer] = Input( merge_meshes_containers._spec().input_pin(0), 0, op, 0 ) self._inputs.append(self._meshes_containers1) - self._meshes_containers2 = Input( + self._meshes_containers2: Input[MeshesContainer] = Input( merge_meshes_containers._spec().input_pin(1), 1, op, 1 ) self._inputs.append(self._meshes_containers2) @property - def meshes_containers1(self) -> Input: + def meshes_containers1(self) -> Input[MeshesContainer]: r"""Allows to connect meshes_containers1 input to the operator. a vector of meshes containers to merge or meshes containers from pin 0 to ... @@ -189,7 +193,7 @@ def meshes_containers1(self) -> Input: return self._meshes_containers1 @property - def meshes_containers2(self) -> Input: + def meshes_containers2(self) -> Input[MeshesContainer]: r"""Allows to connect meshes_containers2 input to the operator. a vector of meshes containers to merge or meshes containers from pin 0 to ... 
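A sketch of the merge_meshes pins annotated above, illustrative only; the example file, the merge-by-distance choice, and the box size value are assumptions.

# Hedged sketch: merge two meshed regions by distance.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

mesh_a = dpf.Model(examples.find_static_rst()).metadata.meshed_region
mesh_b = dpf.Model(examples.find_static_rst()).metadata.meshed_region

op = ops.utility.merge_meshes()
op.inputs.meshes1.connect(mesh_a)       # Input[MeshedRegion | MeshesContainer]
op.inputs.meshes2.connect(mesh_b)
op.inputs.merge_method.connect(0)       # Input[int]: 0 merges by distance, 1 by node id (default)
op.inputs.box_size.connect(1e-10)       # Input[float]: only used when merging by distance
merged_mesh = op.outputs.merges_mesh()  # Output[MeshedRegion]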
@@ -224,13 +228,13 @@ class OutputsMergeMeshesContainers(_Outputs): def __init__(self, op: Operator): super().__init__(merge_meshes_containers._spec().outputs, op) - self._merged_meshes_container = Output( + self._merged_meshes_container: Output[MeshesContainer] = Output( merge_meshes_containers._spec().output_pin(0), 0, op ) self._outputs.append(self._merged_meshes_container) @property - def merged_meshes_container(self) -> Output: + def merged_meshes_container(self) -> Output[MeshesContainer]: r"""Allows to get merged_meshes_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_property_fields.py b/src/ansys/dpf/core/operators/utility/merge_property_fields.py index f65f922afbc..518b66b6db7 100644 --- a/src/ansys/dpf/core/operators/utility/merge_property_fields.py +++ b/src/ansys/dpf/core/operators/utility/merge_property_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.property_field import PropertyField + class merge_property_fields(Operator): r"""Assembles a set of property fields into a unique one. @@ -178,21 +182,21 @@ class InputsMergePropertyFields(_Inputs): def __init__(self, op: Operator): super().__init__(merge_property_fields._spec().inputs, op) - self._naive_merge = Input( + self._naive_merge: Input[bool] = Input( merge_property_fields._spec().input_pin(-201), -201, op, -1 ) self._inputs.append(self._naive_merge) - self._property_fields1 = Input( + self._property_fields1: Input[PropertyField] = Input( merge_property_fields._spec().input_pin(0), 0, op, 0 ) self._inputs.append(self._property_fields1) - self._property_fields2 = Input( + self._property_fields2: Input[PropertyField] = Input( merge_property_fields._spec().input_pin(1), 1, op, 1 ) self._inputs.append(self._property_fields2) @property - def naive_merge(self) -> Input: + def naive_merge(self) -> Input[bool]: r"""Allows to connect naive_merge input to the operator. If true, merge the input property fields assuming that there is no repetition in their scoping ids. Default is false. @@ -213,7 +217,7 @@ def naive_merge(self) -> Input: return self._naive_merge @property - def property_fields1(self) -> Input: + def property_fields1(self) -> Input[PropertyField]: r"""Allows to connect property_fields1 input to the operator. Either a property fields container, a vector of property fields to merge or property fields from pin 0 to ... @@ -234,7 +238,7 @@ def property_fields1(self) -> Input: return self._property_fields1 @property - def property_fields2(self) -> Input: + def property_fields2(self) -> Input[PropertyField]: r"""Allows to connect property_fields2 input to the operator. Either a property fields container, a vector of property fields to merge or property fields from pin 0 to ... 
@@ -269,13 +273,13 @@ class OutputsMergePropertyFields(_Outputs): def __init__(self, op: Operator): super().__init__(merge_property_fields._spec().outputs, op) - self._property_field = Output( + self._property_field: Output[PropertyField] = Output( merge_property_fields._spec().output_pin(0), 0, op ) self._outputs.append(self._property_field) @property - def property_field(self) -> Output: + def property_field(self) -> Output[PropertyField]: r"""Allows to get property_field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_result_infos.py b/src/ansys/dpf/core/operators/utility/merge_result_infos.py index ddd18385267..e9450320304 100644 --- a/src/ansys/dpf/core/operators/utility/merge_result_infos.py +++ b/src/ansys/dpf/core/operators/utility/merge_result_infos.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.result_info import ResultInfo + class merge_result_infos(Operator): r"""Assembles a set of result information into a unique one. @@ -158,13 +162,17 @@ class InputsMergeResultInfos(_Inputs): def __init__(self, op: Operator): super().__init__(merge_result_infos._spec().inputs, op) - self._result_infos1 = Input(merge_result_infos._spec().input_pin(0), 0, op, 0) + self._result_infos1: Input[ResultInfo] = Input( + merge_result_infos._spec().input_pin(0), 0, op, 0 + ) self._inputs.append(self._result_infos1) - self._result_infos2 = Input(merge_result_infos._spec().input_pin(1), 1, op, 1) + self._result_infos2: Input[ResultInfo] = Input( + merge_result_infos._spec().input_pin(1), 1, op, 1 + ) self._inputs.append(self._result_infos2) @property - def result_infos1(self) -> Input: + def result_infos1(self) -> Input[ResultInfo]: r"""Allows to connect result_infos1 input to the operator. A vector of result info containers to merge or result infos from pin 0 to ... @@ -185,7 +193,7 @@ def result_infos1(self) -> Input: return self._result_infos1 @property - def result_infos2(self) -> Input: + def result_infos2(self) -> Input[ResultInfo]: r"""Allows to connect result_infos2 input to the operator. A vector of result info containers to merge or result infos from pin 0 to ... 
@@ -220,13 +228,13 @@ class OutputsMergeResultInfos(_Outputs): def __init__(self, op: Operator): super().__init__(merge_result_infos._spec().outputs, op) - self._merged_result_infos = Output( + self._merged_result_infos: Output[ResultInfo] = Output( merge_result_infos._spec().output_pin(0), 0, op ) self._outputs.append(self._merged_result_infos) @property - def merged_result_infos(self) -> Output: + def merged_result_infos(self) -> Output[ResultInfo]: r"""Allows to get merged_result_infos output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_scopings.py b/src/ansys/dpf/core/operators/utility/merge_scopings.py index d1b631bbc90..51f8270bf3b 100644 --- a/src/ansys/dpf/core/operators/utility/merge_scopings.py +++ b/src/ansys/dpf/core/operators/utility/merge_scopings.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.scopings_container import ScopingsContainer + class merge_scopings(Operator): r"""Assembles a set of scopings into a unique one. @@ -156,13 +161,17 @@ class InputsMergeScopings(_Inputs): def __init__(self, op: Operator): super().__init__(merge_scopings._spec().inputs, op) - self._scopings1 = Input(merge_scopings._spec().input_pin(0), 0, op, 0) + self._scopings1: Input[Scoping | ScopingsContainer] = Input( + merge_scopings._spec().input_pin(0), 0, op, 0 + ) self._inputs.append(self._scopings1) - self._scopings2 = Input(merge_scopings._spec().input_pin(1), 1, op, 1) + self._scopings2: Input[Scoping | ScopingsContainer] = Input( + merge_scopings._spec().input_pin(1), 1, op, 1 + ) self._inputs.append(self._scopings2) @property - def scopings1(self) -> Input: + def scopings1(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect scopings1 input to the operator. Either a scopings container, a vector of scopings to merge, or scopings from pin 0 to ... @@ -183,7 +192,7 @@ def scopings1(self) -> Input: return self._scopings1 @property - def scopings2(self) -> Input: + def scopings2(self) -> Input[Scoping | ScopingsContainer]: r"""Allows to connect scopings2 input to the operator. Either a scopings container, a vector of scopings to merge, or scopings from pin 0 to ... 
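To show the ResultInfo pins typed above in context, a minimal sketch (not from the patch; it reuses one ResultInfo twice and assumes the bundled example file).

# Hedged sketch: typed pins of merge_result_infos.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

info = dpf.Model(examples.find_static_rst()).metadata.result_info

op = ops.utility.merge_result_infos()
op.inputs.result_infos1.connect(info)           # Input[ResultInfo]
op.inputs.result_infos2.connect(info)
merged_info = op.outputs.merged_result_infos()  # Output[ResultInfo]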
@@ -218,11 +227,13 @@ class OutputsMergeScopings(_Outputs): def __init__(self, op: Operator): super().__init__(merge_scopings._spec().outputs, op) - self._merged_scoping = Output(merge_scopings._spec().output_pin(0), 0, op) + self._merged_scoping: Output[Scoping] = Output( + merge_scopings._spec().output_pin(0), 0, op + ) self._outputs.append(self._merged_scoping) @property - def merged_scoping(self) -> Output: + def merged_scoping(self) -> Output[Scoping]: r"""Allows to get merged_scoping output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_scopings_containers.py b/src/ansys/dpf/core/operators/utility/merge_scopings_containers.py index edbb2fad31c..778f0815d58 100644 --- a/src/ansys/dpf/core/operators/utility/merge_scopings_containers.py +++ b/src/ansys/dpf/core/operators/utility/merge_scopings_containers.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.scopings_container import ScopingsContainer + class merge_scopings_containers(Operator): r"""Assembles a set of scopings containers into a unique one. @@ -162,17 +166,17 @@ class InputsMergeScopingsContainers(_Inputs): def __init__(self, op: Operator): super().__init__(merge_scopings_containers._spec().inputs, op) - self._scopings_containers1 = Input( + self._scopings_containers1: Input[ScopingsContainer] = Input( merge_scopings_containers._spec().input_pin(0), 0, op, 0 ) self._inputs.append(self._scopings_containers1) - self._scopings_containers2 = Input( + self._scopings_containers2: Input[ScopingsContainer] = Input( merge_scopings_containers._spec().input_pin(1), 1, op, 1 ) self._inputs.append(self._scopings_containers2) @property - def scopings_containers1(self) -> Input: + def scopings_containers1(self) -> Input[ScopingsContainer]: r"""Allows to connect scopings_containers1 input to the operator. A vector of scopings containers to merge or scopings containers from pin 0 to ... @@ -193,7 +197,7 @@ def scopings_containers1(self) -> Input: return self._scopings_containers1 @property - def scopings_containers2(self) -> Input: + def scopings_containers2(self) -> Input[ScopingsContainer]: r"""Allows to connect scopings_containers2 input to the operator. A vector of scopings containers to merge or scopings containers from pin 0 to ... 
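The Scoping | ScopingsContainer union shown above also flows through the generated constructor keywords; a standalone sketch with illustrative ids and location.

# Hedged sketch: merge two nodal scopings via constructor keywords.
from ansys.dpf import core as dpf
from ansys.dpf.core import operators as ops

scoping_a = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)
scoping_b = dpf.Scoping(ids=[3, 4, 5], location=dpf.locations.nodal)

op = ops.utility.merge_scopings(scopings1=scoping_a, scopings2=scoping_b)
merged_scoping = op.outputs.merged_scoping()   # Output[Scoping]; merging is expected to union the ids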
@@ -228,13 +232,13 @@ class OutputsMergeScopingsContainers(_Outputs): def __init__(self, op: Operator): super().__init__(merge_scopings_containers._spec().outputs, op) - self._merged_scopings_container = Output( + self._merged_scopings_container: Output[ScopingsContainer] = Output( merge_scopings_containers._spec().output_pin(0), 0, op ) self._outputs.append(self._merged_scopings_container) @property - def merged_scopings_container(self) -> Output: + def merged_scopings_container(self) -> Output[ScopingsContainer]: r"""Allows to get merged_scopings_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_string_fields.py b/src/ansys/dpf/core/operators/utility/merge_string_fields.py index c490570436a..9143ce22cc8 100644 --- a/src/ansys/dpf/core/operators/utility/merge_string_fields.py +++ b/src/ansys/dpf/core/operators/utility/merge_string_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.string_field import StringField + class merge_string_fields(Operator): r"""Assembles a set of string fields into a unique one. @@ -158,13 +162,17 @@ class InputsMergeStringFields(_Inputs): def __init__(self, op: Operator): super().__init__(merge_string_fields._spec().inputs, op) - self._string_fields1 = Input(merge_string_fields._spec().input_pin(0), 0, op, 0) + self._string_fields1: Input[StringField] = Input( + merge_string_fields._spec().input_pin(0), 0, op, 0 + ) self._inputs.append(self._string_fields1) - self._string_fields2 = Input(merge_string_fields._spec().input_pin(1), 1, op, 1) + self._string_fields2: Input[StringField] = Input( + merge_string_fields._spec().input_pin(1), 1, op, 1 + ) self._inputs.append(self._string_fields2) @property - def string_fields1(self) -> Input: + def string_fields1(self) -> Input[StringField]: r"""Allows to connect string_fields1 input to the operator. Either a a vector of string fields to merge or string fields from pin 0 to ... @@ -185,7 +193,7 @@ def string_fields1(self) -> Input: return self._string_fields1 @property - def string_fields2(self) -> Input: + def string_fields2(self) -> Input[StringField]: r"""Allows to connect string_fields2 input to the operator. Either a a vector of string fields to merge or string fields from pin 0 to ... 
@@ -220,11 +228,13 @@ class OutputsMergeStringFields(_Outputs): def __init__(self, op: Operator): super().__init__(merge_string_fields._spec().outputs, op) - self._string_field = Output(merge_string_fields._spec().output_pin(0), 0, op) + self._string_field: Output[StringField] = Output( + merge_string_fields._spec().output_pin(0), 0, op + ) self._outputs.append(self._string_field) @property - def string_field(self) -> Output: + def string_field(self) -> Output[StringField]: r"""Allows to get string_field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_supports.py b/src/ansys/dpf/core/operators/utility/merge_supports.py index 4a1680f4e6d..665bf967d5a 100644 --- a/src/ansys/dpf/core/operators/utility/merge_supports.py +++ b/src/ansys/dpf/core/operators/utility/merge_supports.py @@ -180,17 +180,17 @@ class InputsMergeSupports(_Inputs): def __init__(self, op: Operator): super().__init__(merge_supports._spec().inputs, op) - self._should_merge_named_selections = Input( + self._should_merge_named_selections: Input[bool] = Input( merge_supports._spec().input_pin(-200), -200, op, -1 ) self._inputs.append(self._should_merge_named_selections) - self._supports1 = Input(merge_supports._spec().input_pin(0), 0, op, 0) + self._supports1: Input = Input(merge_supports._spec().input_pin(0), 0, op, 0) self._inputs.append(self._supports1) - self._supports2 = Input(merge_supports._spec().input_pin(1), 1, op, 1) + self._supports2: Input = Input(merge_supports._spec().input_pin(1), 1, op, 1) self._inputs.append(self._supports2) @property - def should_merge_named_selections(self) -> Input: + def should_merge_named_selections(self) -> Input[bool]: r"""Allows to connect should_merge_named_selections input to the operator. For some result files (such as RST), the scoping on names selection is duplicated through all the distributed files.If this pin is false, the merging process is skipped. If it is true, this scoping is merged. Default is true. @@ -267,7 +267,9 @@ class OutputsMergeSupports(_Outputs): def __init__(self, op: Operator): super().__init__(merge_supports._spec().outputs, op) - self._merged_support = Output(merge_supports._spec().output_pin(0), 0, op) + self._merged_support: Output = Output( + merge_supports._spec().output_pin(0), 0, op + ) self._outputs.append(self._merged_support) @property diff --git a/src/ansys/dpf/core/operators/utility/merge_time_freq_supports.py b/src/ansys/dpf/core/operators/utility/merge_time_freq_supports.py index 5b734526be9..d8071c28a2d 100644 --- a/src/ansys/dpf/core/operators/utility/merge_time_freq_supports.py +++ b/src/ansys/dpf/core/operators/utility/merge_time_freq_supports.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class merge_time_freq_supports(Operator): r"""Assembles a set of time/frequency supports into a unique one. 
@@ -162,17 +166,17 @@ class InputsMergeTimeFreqSupports(_Inputs): def __init__(self, op: Operator): super().__init__(merge_time_freq_supports._spec().inputs, op) - self._time_freq_supports1 = Input( + self._time_freq_supports1: Input[TimeFreqSupport] = Input( merge_time_freq_supports._spec().input_pin(0), 0, op, 0 ) self._inputs.append(self._time_freq_supports1) - self._time_freq_supports2 = Input( + self._time_freq_supports2: Input[TimeFreqSupport] = Input( merge_time_freq_supports._spec().input_pin(1), 1, op, 1 ) self._inputs.append(self._time_freq_supports2) @property - def time_freq_supports1(self) -> Input: + def time_freq_supports1(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_supports1 input to the operator. A vector of time/frequency supports to merge or time/frequency supports from pin 0 to ... @@ -193,7 +197,7 @@ def time_freq_supports1(self) -> Input: return self._time_freq_supports1 @property - def time_freq_supports2(self) -> Input: + def time_freq_supports2(self) -> Input[TimeFreqSupport]: r"""Allows to connect time_freq_supports2 input to the operator. A vector of time/frequency supports to merge or time/frequency supports from pin 0 to ... @@ -228,13 +232,13 @@ class OutputsMergeTimeFreqSupports(_Outputs): def __init__(self, op: Operator): super().__init__(merge_time_freq_supports._spec().outputs, op) - self._merged_support = Output( + self._merged_support: Output[TimeFreqSupport] = Output( merge_time_freq_supports._spec().output_pin(0), 0, op ) self._outputs.append(self._merged_support) @property - def merged_support(self) -> Output: + def merged_support(self) -> Output[TimeFreqSupport]: r"""Allows to get merged_support output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_to_field_matrix.py b/src/ansys/dpf/core/operators/utility/merge_to_field_matrix.py index cf5fce72779..d915972e023 100644 --- a/src/ansys/dpf/core/operators/utility/merge_to_field_matrix.py +++ b/src/ansys/dpf/core/operators/utility/merge_to_field_matrix.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class merge_to_field_matrix(Operator): r"""Assembles a set of fields into a field matrix. @@ -156,13 +161,17 @@ class InputsMergeToFieldMatrix(_Inputs): def __init__(self, op: Operator): super().__init__(merge_to_field_matrix._spec().inputs, op) - self._fields1 = Input(merge_to_field_matrix._spec().input_pin(0), 0, op, 0) + self._fields1: Input[Field | FieldsContainer] = Input( + merge_to_field_matrix._spec().input_pin(0), 0, op, 0 + ) self._inputs.append(self._fields1) - self._fields2 = Input(merge_to_field_matrix._spec().input_pin(1), 1, op, 1) + self._fields2: Input[Field | FieldsContainer] = Input( + merge_to_field_matrix._spec().input_pin(1), 1, op, 1 + ) self._inputs.append(self._fields2) @property - def fields1(self) -> Input: + def fields1(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fields1 input to the operator. Either a fields container, a vector of fields to merge, or fields from pin 0 to ... 
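A short sketch for the TimeFreqSupport pins annotated above (illustrative; the same support is fed to both pins and the bundled example file is assumed).

# Hedged sketch: typed pins of merge_time_freq_supports.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

support = dpf.Model(examples.find_static_rst()).metadata.time_freq_support

op = ops.utility.merge_time_freq_supports()
op.inputs.time_freq_supports1.connect(support)   # Input[TimeFreqSupport]
op.inputs.time_freq_supports2.connect(support)
merged_support = op.outputs.merged_support()     # Output[TimeFreqSupport]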
@@ -183,7 +192,7 @@ def fields1(self) -> Input: return self._fields1 @property - def fields2(self) -> Input: + def fields2(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fields2 input to the operator. Either a fields container, a vector of fields to merge, or fields from pin 0 to ... @@ -218,13 +227,13 @@ class OutputsMergeToFieldMatrix(_Outputs): def __init__(self, op: Operator): super().__init__(merge_to_field_matrix._spec().outputs, op) - self._merged_field_matrix = Output( + self._merged_field_matrix: Output[Field] = Output( merge_to_field_matrix._spec().output_pin(0), 0, op ) self._outputs.append(self._merged_field_matrix) @property - def merged_field_matrix(self) -> Output: + def merged_field_matrix(self) -> Output[Field]: r"""Allows to get merged_field_matrix output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_weighted_fields.py b/src/ansys/dpf/core/operators/utility/merge_weighted_fields.py index 65b9becd8c5..6a2d283af94 100644 --- a/src/ansys/dpf/core/operators/utility/merge_weighted_fields.py +++ b/src/ansys/dpf/core/operators/utility/merge_weighted_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.property_field import PropertyField + class merge_weighted_fields(Operator): r"""Assembles a set of fields into a unique one, applying a weight on the @@ -228,27 +234,33 @@ class InputsMergeWeightedFields(_Inputs): def __init__(self, op: Operator): super().__init__(merge_weighted_fields._spec().inputs, op) - self._sum_merge = Input(merge_weighted_fields._spec().input_pin(-2), -2, op, -1) + self._sum_merge: Input[bool] = Input( + merge_weighted_fields._spec().input_pin(-2), -2, op, -1 + ) self._inputs.append(self._sum_merge) - self._merged_support = Input( + self._merged_support: Input = Input( merge_weighted_fields._spec().input_pin(-1), -1, op, -1 ) self._inputs.append(self._merged_support) - self._fields1 = Input(merge_weighted_fields._spec().input_pin(0), 0, op, 0) + self._fields1: Input[Field | FieldsContainer] = Input( + merge_weighted_fields._spec().input_pin(0), 0, op, 0 + ) self._inputs.append(self._fields1) - self._fields2 = Input(merge_weighted_fields._spec().input_pin(1), 1, op, 1) + self._fields2: Input[Field | FieldsContainer] = Input( + merge_weighted_fields._spec().input_pin(1), 1, op, 1 + ) self._inputs.append(self._fields2) - self._weights1 = Input( + self._weights1: Input[PropertyField] = Input( merge_weighted_fields._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._weights1) - self._weights2 = Input( + self._weights2: Input[PropertyField] = Input( merge_weighted_fields._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._weights2) @property - def sum_merge(self) -> Input: + def sum_merge(self) -> Input[bool]: r"""Allows to connect sum_merge input to the operator. Default is false. If true, redundant quantities are summed instead of being ignored. @@ -290,7 +302,7 @@ def merged_support(self) -> Input: return self._merged_support @property - def fields1(self) -> Input: + def fields1(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fields1 input to the operator. 
Either a fields container, a vector of fields to merge, or fields from pin 0 to ... @@ -311,7 +323,7 @@ def fields1(self) -> Input: return self._fields1 @property - def fields2(self) -> Input: + def fields2(self) -> Input[Field | FieldsContainer]: r"""Allows to connect fields2 input to the operator. Either a fields container, a vector of fields to merge, or fields from pin 0 to ... @@ -332,7 +344,7 @@ def fields2(self) -> Input: return self._fields2 @property - def weights1(self) -> Input: + def weights1(self) -> Input[PropertyField]: r"""Allows to connect weights1 input to the operator. Weights to apply to each field from pin 1000 to ... @@ -353,7 +365,7 @@ def weights1(self) -> Input: return self._weights1 @property - def weights2(self) -> Input: + def weights2(self) -> Input[PropertyField]: r"""Allows to connect weights2 input to the operator. Weights to apply to each field from pin 1000 to ... @@ -388,11 +400,13 @@ class OutputsMergeWeightedFields(_Outputs): def __init__(self, op: Operator): super().__init__(merge_weighted_fields._spec().outputs, op) - self._merged_field = Output(merge_weighted_fields._spec().output_pin(0), 0, op) + self._merged_field: Output[Field] = Output( + merge_weighted_fields._spec().output_pin(0), 0, op + ) self._outputs.append(self._merged_field) @property - def merged_field(self) -> Output: + def merged_field(self) -> Output[Field]: r"""Allows to get merged_field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/merge_weighted_fields_containers.py b/src/ansys/dpf/core/operators/utility/merge_weighted_fields_containers.py index bdee65964dd..13a4c537b04 100644 --- a/src/ansys/dpf/core/operators/utility/merge_weighted_fields_containers.py +++ b/src/ansys/dpf/core/operators/utility/merge_weighted_fields_containers.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class merge_weighted_fields_containers(Operator): r"""Assembles a set of fields containers into a unique one, applying a @@ -33,10 +37,10 @@ class merge_weighted_fields_containers(Operator): fields_containers2: FieldsContainer A vector of fields containers to merge or fields containers from pin 0 to ... weights1: Class Dataprocessing::Dpftypecollection<Class - Dataprocessing::Cpropertyfield> + Dataprocessing::Cpropertyfield> Weights to apply to each field from pin 1000 to ... weights2: Class Dataprocessing::Dpftypecollection<Class - Dataprocessing::Cpropertyfield> + Dataprocessing::Cpropertyfield> Weights to apply to each field from pin 1000 to ... 
Outputs @@ -259,37 +263,37 @@ class InputsMergeWeightedFieldsContainers(_Inputs): def __init__(self, op: Operator): super().__init__(merge_weighted_fields_containers._spec().inputs, op) - self._sum_merge = Input( + self._sum_merge: Input[bool] = Input( merge_weighted_fields_containers._spec().input_pin(-3), -3, op, -1 ) self._inputs.append(self._sum_merge) - self._merged_fields_support = Input( + self._merged_fields_support: Input = Input( merge_weighted_fields_containers._spec().input_pin(-2), -2, op, -1 ) self._inputs.append(self._merged_fields_support) - self._merged_fields_containers_support = Input( + self._merged_fields_containers_support: Input = Input( merge_weighted_fields_containers._spec().input_pin(-1), -1, op, -1 ) self._inputs.append(self._merged_fields_containers_support) - self._fields_containers1 = Input( + self._fields_containers1: Input[FieldsContainer] = Input( merge_weighted_fields_containers._spec().input_pin(0), 0, op, 0 ) self._inputs.append(self._fields_containers1) - self._fields_containers2 = Input( + self._fields_containers2: Input[FieldsContainer] = Input( merge_weighted_fields_containers._spec().input_pin(1), 1, op, 1 ) self._inputs.append(self._fields_containers2) - self._weights1 = Input( + self._weights1: Input = Input( merge_weighted_fields_containers._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._weights1) - self._weights2 = Input( + self._weights2: Input = Input( merge_weighted_fields_containers._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._weights2) @property - def sum_merge(self) -> Input: + def sum_merge(self) -> Input[bool]: r"""Allows to connect sum_merge input to the operator. Default is false. If true, redundant quantities are summed instead of being ignored. @@ -352,7 +356,7 @@ def merged_fields_containers_support(self) -> Input: return self._merged_fields_containers_support @property - def fields_containers1(self) -> Input: + def fields_containers1(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containers1 input to the operator. A vector of fields containers to merge or fields containers from pin 0 to ... @@ -373,7 +377,7 @@ def fields_containers1(self) -> Input: return self._fields_containers1 @property - def fields_containers2(self) -> Input: + def fields_containers2(self) -> Input[FieldsContainer]: r"""Allows to connect fields_containers2 input to the operator. A vector of fields containers to merge or fields containers from pin 0 to ... 
@@ -450,13 +454,13 @@ class OutputsMergeWeightedFieldsContainers(_Outputs): def __init__(self, op: Operator): super().__init__(merge_weighted_fields_containers._spec().outputs, op) - self._merged_fields_container = Output( + self._merged_fields_container: Output[FieldsContainer] = Output( merge_weighted_fields_containers._spec().output_pin(0), 0, op ) self._outputs.append(self._merged_fields_container) @property - def merged_fields_container(self) -> Output: + def merged_fields_container(self) -> Output[FieldsContainer]: r"""Allows to get merged_fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/mesh.py b/src/ansys/dpf/core/operators/utility/mesh.py index be67f93caa9..9c8756f7c39 100644 --- a/src/ansys/dpf/core/operators/utility/mesh.py +++ b/src/ansys/dpf/core/operators/utility/mesh.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + class mesh(Operator): r"""Incrementaly merge the input. @@ -139,11 +143,11 @@ class InputsMesh(_Inputs): def __init__(self, op: Operator): super().__init__(mesh._spec().inputs, op) - self._input = Input(mesh._spec().input_pin(0), 0, op, -1) + self._input: Input[MeshedRegion] = Input(mesh._spec().input_pin(0), 0, op, -1) self._inputs.append(self._input) @property - def input(self) -> Input: + def input(self) -> Input[MeshedRegion]: r"""Allows to connect input input to the operator. Returns @@ -176,7 +180,7 @@ class OutputsMesh(_Outputs): def __init__(self, op: Operator): super().__init__(mesh._spec().outputs, op) - self._incremented_result = Output(mesh._spec().output_pin(0), 0, op) + self._incremented_result: Output = Output(mesh._spec().output_pin(0), 0, op) self._outputs.append(self._incremented_result) @property diff --git a/src/ansys/dpf/core/operators/utility/mesh_to_mc.py b/src/ansys/dpf/core/operators/utility/mesh_to_mc.py index f88dc00f399..92aa48322f8 100644 --- a/src/ansys/dpf/core/operators/utility/mesh_to_mc.py +++ b/src/ansys/dpf/core/operators/utility/mesh_to_mc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshed_region import MeshedRegion + from ansys.dpf.core.meshes_container import MeshesContainer + class mesh_to_mc(Operator): r"""Creates a meshes container containing the mesh provided on pin 0. @@ -156,13 +161,15 @@ class InputsMeshToMc(_Inputs): def __init__(self, op: Operator): super().__init__(mesh_to_mc._spec().inputs, op) - self._mesh = Input(mesh_to_mc._spec().input_pin(0), 0, op, -1) + self._mesh: Input[MeshedRegion | MeshesContainer] = Input( + mesh_to_mc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._mesh) - self._label = Input(mesh_to_mc._spec().input_pin(1), 1, op, -1) + self._label: Input[dict] = Input(mesh_to_mc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._label) @property - def mesh(self) -> Input: + def mesh(self) -> Input[MeshedRegion | MeshesContainer]: r"""Allows to connect mesh input to the operator. 
If a meshes container is set in input, it is passed on as an output with the additional label space (if any). @@ -183,7 +190,7 @@ def mesh(self) -> Input: return self._mesh @property - def label(self) -> Input: + def label(self) -> Input[dict]: r"""Allows to connect label input to the operator. Sets a label space. @@ -218,11 +225,13 @@ class OutputsMeshToMc(_Outputs): def __init__(self, op: Operator): super().__init__(mesh_to_mc._spec().outputs, op) - self._meshes_container = Output(mesh_to_mc._spec().output_pin(0), 0, op) + self._meshes_container: Output[MeshesContainer] = Output( + mesh_to_mc._spec().output_pin(0), 0, op + ) self._outputs.append(self._meshes_container) @property - def meshes_container(self) -> Output: + def meshes_container(self) -> Output[MeshesContainer]: r"""Allows to get meshes_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/meshes_container.py b/src/ansys/dpf/core/operators/utility/meshes_container.py index 869f14c9957..c562c2f08cc 100644 --- a/src/ansys/dpf/core/operators/utility/meshes_container.py +++ b/src/ansys/dpf/core/operators/utility/meshes_container.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.meshes_container import MeshesContainer + class meshes_container(Operator): r"""Incrementaly merge the input. @@ -143,11 +147,13 @@ class InputsMeshesContainer(_Inputs): def __init__(self, op: Operator): super().__init__(meshes_container._spec().inputs, op) - self._input = Input(meshes_container._spec().input_pin(0), 0, op, -1) + self._input: Input[MeshesContainer] = Input( + meshes_container._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._input) @property - def input(self) -> Input: + def input(self) -> Input[MeshesContainer]: r"""Allows to connect input input to the operator. 
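The dict annotation on the mesh_to_mc label pin above suggests a plain label-space dictionary can be connected; a sketch under that assumption, using the bundled example mesh.

# Hedged sketch: wrap a mesh into a meshes container with a label space.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

mesh = dpf.Model(examples.find_static_rst()).metadata.meshed_region

op = ops.utility.mesh_to_mc()
op.inputs.mesh.connect(mesh)             # Input[MeshedRegion | MeshesContainer]
op.inputs.label.connect({"time": 1})     # Input[dict]: label space attached to the stored mesh
meshes = op.outputs.meshes_container()   # Output[MeshesContainer]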
Returns @@ -180,7 +186,9 @@ class OutputsMeshesContainer(_Outputs): def __init__(self, op: Operator): super().__init__(meshes_container._spec().outputs, op) - self._incremented_result = Output(meshes_container._spec().output_pin(0), 0, op) + self._incremented_result: Output = Output( + meshes_container._spec().output_pin(0), 0, op + ) self._outputs.append(self._incremented_result) @property diff --git a/src/ansys/dpf/core/operators/utility/operator_changelog.py b/src/ansys/dpf/core/operators/utility/operator_changelog.py index e2949ce0763..0f2303dc3c4 100644 --- a/src/ansys/dpf/core/operators/utility/operator_changelog.py +++ b/src/ansys/dpf/core/operators/utility/operator_changelog.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.generic_data_container import GenericDataContainer + class operator_changelog(Operator): r"""Return a GenericDataContainer used to instantiate the Changelog of an @@ -144,11 +148,13 @@ class InputsOperatorChangelog(_Inputs): def __init__(self, op: Operator): super().__init__(operator_changelog._spec().inputs, op) - self._operator_name = Input(operator_changelog._spec().input_pin(0), 0, op, -1) + self._operator_name: Input[str] = Input( + operator_changelog._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._operator_name) @property - def operator_name(self) -> Input: + def operator_name(self) -> Input[str]: r"""Allows to connect operator_name input to the operator. Operator internal name. @@ -183,11 +189,13 @@ class OutputsOperatorChangelog(_Outputs): def __init__(self, op: Operator): super().__init__(operator_changelog._spec().outputs, op) - self._changelog_gdc = Output(operator_changelog._spec().output_pin(0), 0, op) + self._changelog_gdc: Output[GenericDataContainer] = Output( + operator_changelog._spec().output_pin(0), 0, op + ) self._outputs.append(self._changelog_gdc) @property - def changelog_gdc(self) -> Output: + def changelog_gdc(self) -> Output[GenericDataContainer]: r"""Allows to get changelog_gdc output of the operator GenericDataContainer used to instantiate a Changelog. diff --git a/src/ansys/dpf/core/operators/utility/operator_id.py b/src/ansys/dpf/core/operators/utility/operator_id.py index 884b44cb2c2..e4be5e771e9 100644 --- a/src/ansys/dpf/core/operators/utility/operator_id.py +++ b/src/ansys/dpf/core/operators/utility/operator_id.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.dpf_operator import Operator + class operator_id(Operator): r"""Return the id of an Operator. @@ -140,11 +144,11 @@ class InputsOperatorId(_Inputs): def __init__(self, op: Operator): super().__init__(operator_id._spec().inputs, op) - self._op = Input(operator_id._spec().input_pin(0), 0, op, -1) + self._op: Input[Operator] = Input(operator_id._spec().input_pin(0), 0, op, -1) self._inputs.append(self._op) @property - def op(self) -> Input: + def op(self) -> Input[Operator]: r"""Allows to connect op input to the operator. 
Returns @@ -177,11 +181,11 @@ class OutputsOperatorId(_Outputs): def __init__(self, op: Operator): super().__init__(operator_id._spec().outputs, op) - self._id = Output(operator_id._spec().output_pin(0), 0, op) + self._id: Output[int] = Output(operator_id._spec().output_pin(0), 0, op) self._outputs.append(self._id) @property - def id(self) -> Output: + def id(self) -> Output[int]: r"""Allows to get id output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/overlap_fields.py b/src/ansys/dpf/core/operators/utility/overlap_fields.py index 88ca2308d41..3335c1fba80 100644 --- a/src/ansys/dpf/core/operators/utility/overlap_fields.py +++ b/src/ansys/dpf/core/operators/utility/overlap_fields.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + class overlap_fields(Operator): r"""Take two fields and superpose them, the overlapping field will override @@ -158,13 +162,17 @@ class InputsOverlapFields(_Inputs): def __init__(self, op: Operator): super().__init__(overlap_fields._spec().inputs, op) - self._base_field = Input(overlap_fields._spec().input_pin(0), 0, op, -1) + self._base_field: Input[Field] = Input( + overlap_fields._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._base_field) - self._overlapping_field = Input(overlap_fields._spec().input_pin(1), 1, op, -1) + self._overlapping_field: Input[Field] = Input( + overlap_fields._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._overlapping_field) @property - def base_field(self) -> Input: + def base_field(self) -> Input[Field]: r"""Allows to connect base_field input to the operator. Returns @@ -183,7 +191,7 @@ def base_field(self) -> Input: return self._base_field @property - def overlapping_field(self) -> Input: + def overlapping_field(self) -> Input[Field]: r"""Allows to connect overlapping_field input to the operator. 
Returns @@ -216,11 +224,11 @@ class OutputsOverlapFields(_Outputs): def __init__(self, op: Operator): super().__init__(overlap_fields._spec().outputs, op) - self._field = Output(overlap_fields._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(overlap_fields._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/producer_consumer_for_each.py b/src/ansys/dpf/core/operators/utility/producer_consumer_for_each.py index 16ca0e625da..ee57e226b56 100644 --- a/src/ansys/dpf/core/operators/utility/producer_consumer_for_each.py +++ b/src/ansys/dpf/core/operators/utility/producer_consumer_for_each.py @@ -199,15 +199,15 @@ class InputsProducerConsumerForEach(_Inputs): def __init__(self, op: Operator): super().__init__(producer_consumer_for_each._spec().inputs, op) - self._producer_consumer_iterableq = Input( + self._producer_consumer_iterableq: Input = Input( producer_consumer_for_each._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._producer_consumer_iterableq) - self._forward1 = Input( + self._forward1: Input = Input( producer_consumer_for_each._spec().input_pin(3), 3, op, 0 ) self._inputs.append(self._forward1) - self._forward2 = Input( + self._forward2: Input = Input( producer_consumer_for_each._spec().input_pin(4), 4, op, 1 ) self._inputs.append(self._forward2) @@ -292,11 +292,17 @@ class OutputsProducerConsumerForEach(_Outputs): def __init__(self, op: Operator): super().__init__(producer_consumer_for_each._spec().outputs, op) - self._empty = Output(producer_consumer_for_each._spec().output_pin(0), 0, op) + self._empty: Output = Output( + producer_consumer_for_each._spec().output_pin(0), 0, op + ) self._outputs.append(self._empty) - self._output1 = Output(producer_consumer_for_each._spec().output_pin(3), 3, op) + self._output1: Output = Output( + producer_consumer_for_each._spec().output_pin(3), 3, op + ) self._outputs.append(self._output1) - self._output2 = Output(producer_consumer_for_each._spec().output_pin(4), 4, op) + self._output2: Output = Output( + producer_consumer_for_each._spec().output_pin(4), 4, op + ) self._outputs.append(self._output2) @property diff --git a/src/ansys/dpf/core/operators/utility/property_field.py b/src/ansys/dpf/core/operators/utility/property_field.py index b0ba03984c4..bbdea494f04 100644 --- a/src/ansys/dpf/core/operators/utility/property_field.py +++ b/src/ansys/dpf/core/operators/utility/property_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.property_field import PropertyField + class property_field(Operator): r"""Incrementaly merge the input. @@ -143,11 +147,13 @@ class InputsPropertyField(_Inputs): def __init__(self, op: Operator): super().__init__(property_field._spec().inputs, op) - self._input = Input(property_field._spec().input_pin(0), 0, op, -1) + self._input: Input[PropertyField] = Input( + property_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._input) @property - def input(self) -> Input: + def input(self) -> Input[PropertyField]: r"""Allows to connect input input to the operator. 
Returns @@ -180,7 +186,9 @@ class OutputsPropertyField(_Outputs): def __init__(self, op: Operator): super().__init__(property_field._spec().outputs, op) - self._incremented_result = Output(property_field._spec().output_pin(0), 0, op) + self._incremented_result: Output = Output( + property_field._spec().output_pin(0), 0, op + ) self._outputs.append(self._incremented_result) @property diff --git a/src/ansys/dpf/core/operators/utility/propertyfield_get_attribute.py b/src/ansys/dpf/core/operators/utility/propertyfield_get_attribute.py index 9a03f259c60..d1cd5ac292a 100644 --- a/src/ansys/dpf/core/operators/utility/propertyfield_get_attribute.py +++ b/src/ansys/dpf/core/operators/utility/propertyfield_get_attribute.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.property_field import PropertyField + class propertyfield_get_attribute(Operator): r"""Gets a property from an input field/field container. A PropertyFieldin @@ -170,17 +174,17 @@ class InputsPropertyfieldGetAttribute(_Inputs): def __init__(self, op: Operator): super().__init__(propertyfield_get_attribute._spec().inputs, op) - self._property_field = Input( + self._property_field: Input[PropertyField] = Input( propertyfield_get_attribute._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._property_field) - self._property_name = Input( + self._property_name: Input[str] = Input( propertyfield_get_attribute._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._property_name) @property - def property_field(self) -> Input: + def property_field(self) -> Input[PropertyField]: r"""Allows to connect property_field input to the operator. Returns @@ -199,7 +203,7 @@ def property_field(self) -> Input: return self._property_field @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. Property to get. Accepted inputs are specific strings namely: 'unit, 'name','time_freq_support', 'scoping' and 'header'. 
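# Illustrative sketch, not part of the patch: with the pins above typed as
# Input[PropertyField] and Input[str], a static type checker (mypy, pyright) can
# flag wrong connections at analysis time. Assumes a running DPF server and the
# generated op.inputs accessors; "scoping" is one of the accepted property names
# listed in the docstring above.
from ansys.dpf.core.operators.utility.propertyfield_get_attribute import (
    propertyfield_get_attribute,
)
from ansys.dpf.core.property_field import PropertyField

op = propertyfield_get_attribute()
op.inputs.property_field.connect(PropertyField())  # OK: matches Input[PropertyField]
op.inputs.property_name.connect("scoping")         # OK: matches Input[str]
# op.inputs.property_name.connect(42)              # now reported by the type checker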
diff --git a/src/ansys/dpf/core/operators/utility/python_generator.py b/src/ansys/dpf/core/operators/utility/python_generator.py index 940e7350189..5d12b12a895 100644 --- a/src/ansys/dpf/core/operators/utility/python_generator.py +++ b/src/ansys/dpf/core/operators/utility/python_generator.py @@ -180,17 +180,25 @@ class InputsPythonGenerator(_Inputs): def __init__(self, op: Operator): super().__init__(python_generator._spec().inputs, op) - self._dll_source_path = Input(python_generator._spec().input_pin(0), 0, op, -1) + self._dll_source_path: Input[str] = Input( + python_generator._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._dll_source_path) - self._output_path = Input(python_generator._spec().input_pin(1), 1, op, -1) + self._output_path: Input[str] = Input( + python_generator._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._output_path) - self._load_symbol = Input(python_generator._spec().input_pin(2), 2, op, -1) + self._load_symbol: Input[str] = Input( + python_generator._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._load_symbol) - self._library_key = Input(python_generator._spec().input_pin(3), 3, op, -1) + self._library_key: Input[str] = Input( + python_generator._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._library_key) @property - def dll_source_path(self) -> Input: + def dll_source_path(self) -> Input[str]: r"""Allows to connect dll_source_path input to the operator. Returns @@ -209,7 +217,7 @@ def dll_source_path(self) -> Input: return self._dll_source_path @property - def output_path(self) -> Input: + def output_path(self) -> Input[str]: r"""Allows to connect output_path input to the operator. Returns @@ -228,7 +236,7 @@ def output_path(self) -> Input: return self._output_path @property - def load_symbol(self) -> Input: + def load_symbol(self) -> Input[str]: r"""Allows to connect load_symbol input to the operator. Returns @@ -247,7 +255,7 @@ def load_symbol(self) -> Input: return self._load_symbol @property - def library_key(self) -> Input: + def library_key(self) -> Input[str]: r"""Allows to connect library_key input to the operator. 
Returns diff --git a/src/ansys/dpf/core/operators/utility/remote_operator_instantiate.py b/src/ansys/dpf/core/operators/utility/remote_operator_instantiate.py index 110dd68def1..de7c9d8d38e 100644 --- a/src/ansys/dpf/core/operators/utility/remote_operator_instantiate.py +++ b/src/ansys/dpf/core/operators/utility/remote_operator_instantiate.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.workflow import Workflow + class remote_operator_instantiate(Operator): r"""Create a local image of an existing remote operator (identified by an id @@ -217,29 +223,29 @@ class InputsRemoteOperatorInstantiate(_Inputs): def __init__(self, op: Operator): super().__init__(remote_operator_instantiate._spec().inputs, op) - self._operator_to_send = Input( + self._operator_to_send: Input[int] = Input( remote_operator_instantiate._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._operator_to_send) - self._output_pin = Input( + self._output_pin: Input[int] = Input( remote_operator_instantiate._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._output_pin) - self._streams_to_remote = Input( + self._streams_to_remote: Input[StreamsContainer] = Input( remote_operator_instantiate._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_to_remote) - self._data_sources_to_remote = Input( + self._data_sources_to_remote: Input[DataSources] = Input( remote_operator_instantiate._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources_to_remote) - self._output_name = Input( + self._output_name: Input[str] = Input( remote_operator_instantiate._spec().input_pin(5), 5, op, -1 ) self._inputs.append(self._output_name) @property - def operator_to_send(self) -> Input: + def operator_to_send(self) -> Input[int]: r"""Allows to connect operator_to_send input to the operator. local workflow to push to a remote or id of a remote workflow @@ -260,7 +266,7 @@ def operator_to_send(self) -> Input: return self._operator_to_send @property - def output_pin(self) -> Input: + def output_pin(self) -> Input[int]: r"""Allows to connect output_pin input to the operator. pin number of the output to name @@ -281,7 +287,7 @@ def output_pin(self) -> Input: return self._output_pin @property - def streams_to_remote(self) -> Input: + def streams_to_remote(self) -> Input[StreamsContainer]: r"""Allows to connect streams_to_remote input to the operator. Returns @@ -300,7 +306,7 @@ def streams_to_remote(self) -> Input: return self._streams_to_remote @property - def data_sources_to_remote(self) -> Input: + def data_sources_to_remote(self) -> Input[DataSources]: r"""Allows to connect data_sources_to_remote input to the operator. Returns @@ -319,7 +325,7 @@ def data_sources_to_remote(self) -> Input: return self._data_sources_to_remote @property - def output_name(self) -> Input: + def output_name(self) -> Input[str]: r"""Allows to connect output_name input to the operator. 
output's name of the workflow to return @@ -354,13 +360,13 @@ class OutputsRemoteOperatorInstantiate(_Outputs): def __init__(self, op: Operator): super().__init__(remote_operator_instantiate._spec().outputs, op) - self._remote_workflow = Output( + self._remote_workflow: Output[Workflow] = Output( remote_operator_instantiate._spec().output_pin(0), 0, op ) self._outputs.append(self._remote_workflow) @property - def remote_workflow(self) -> Output: + def remote_workflow(self) -> Output[Workflow]: r"""Allows to get remote_workflow output of the operator remote workflow containing an image of the remote workflow and the protocols streams diff --git a/src/ansys/dpf/core/operators/utility/remote_workflow_instantiate.py b/src/ansys/dpf/core/operators/utility/remote_workflow_instantiate.py index 7c13b1c5a97..ef959bdce56 100644 --- a/src/ansys/dpf/core/operators/utility/remote_workflow_instantiate.py +++ b/src/ansys/dpf/core/operators/utility/remote_workflow_instantiate.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,11 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_sources import DataSources + from ansys.dpf.core.streams_container import StreamsContainer + from ansys.dpf.core.workflow import Workflow + class remote_workflow_instantiate(Operator): r"""Sends a local workflow to a remote process (and keep a local image of @@ -185,21 +191,21 @@ class InputsRemoteWorkflowInstantiate(_Inputs): def __init__(self, op: Operator): super().__init__(remote_workflow_instantiate._spec().inputs, op) - self._workflow_to_send = Input( + self._workflow_to_send: Input[Workflow | int] = Input( remote_workflow_instantiate._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._workflow_to_send) - self._streams_to_remote = Input( + self._streams_to_remote: Input[StreamsContainer] = Input( remote_workflow_instantiate._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._streams_to_remote) - self._data_sources_to_remote = Input( + self._data_sources_to_remote: Input[DataSources] = Input( remote_workflow_instantiate._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._data_sources_to_remote) @property - def workflow_to_send(self) -> Input: + def workflow_to_send(self) -> Input[Workflow | int]: r"""Allows to connect workflow_to_send input to the operator. local workflow to push to a remote or id of a remote workflow @@ -220,7 +226,7 @@ def workflow_to_send(self) -> Input: return self._workflow_to_send @property - def streams_to_remote(self) -> Input: + def streams_to_remote(self) -> Input[StreamsContainer]: r"""Allows to connect streams_to_remote input to the operator. Returns @@ -239,7 +245,7 @@ def streams_to_remote(self) -> Input: return self._streams_to_remote @property - def data_sources_to_remote(self) -> Input: + def data_sources_to_remote(self) -> Input[DataSources]: r"""Allows to connect data_sources_to_remote input to the operator. 
Returns @@ -272,13 +278,13 @@ class OutputsRemoteWorkflowInstantiate(_Outputs): def __init__(self, op: Operator): super().__init__(remote_workflow_instantiate._spec().outputs, op) - self._remote_workflow = Output( + self._remote_workflow: Output[Workflow] = Output( remote_workflow_instantiate._spec().output_pin(0), 0, op ) self._outputs.append(self._remote_workflow) @property - def remote_workflow(self) -> Output: + def remote_workflow(self) -> Output[Workflow]: r"""Allows to get remote_workflow output of the operator remote workflow containing an image of the remote workflow and the protocols streams diff --git a/src/ansys/dpf/core/operators/utility/remove_unnecessary_labels.py b/src/ansys/dpf/core/operators/utility/remove_unnecessary_labels.py index f2272adfdba..4424d44bcdb 100644 --- a/src/ansys/dpf/core/operators/utility/remove_unnecessary_labels.py +++ b/src/ansys/dpf/core/operators/utility/remove_unnecessary_labels.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class remove_unnecessary_labels(Operator): r"""Removes the selected label from a fields container. @@ -178,21 +182,21 @@ class InputsRemoveUnnecessaryLabels(_Inputs): def __init__(self, op: Operator): super().__init__(remove_unnecessary_labels._spec().inputs, op) - self._permissive = Input( + self._permissive: Input[bool] = Input( remove_unnecessary_labels._spec().input_pin(-1), -1, op, -1 ) self._inputs.append(self._permissive) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( remove_unnecessary_labels._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._label_to_remove = Input( + self._label_to_remove: Input[str] = Input( remove_unnecessary_labels._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._label_to_remove) @property - def permissive(self) -> Input: + def permissive(self) -> Input[bool]: r"""Allows to connect permissive input to the operator. If true, the operator does not throw an error if the label to be removed has more than one entry. @@ -213,7 +217,7 @@ def permissive(self) -> Input: return self._permissive @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Fields Container with the label to be removed. @@ -234,7 +238,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def label_to_remove(self) -> Input: + def label_to_remove(self) -> Input[str]: r"""Allows to connect label_to_remove input to the operator. Label to be removed from the fields container. 
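# Hypothetical usage sketch (not part of the patch): the annotations above surface
# the expected pin types of remove_unnecessary_labels, e.g. Input[FieldsContainer],
# Input[str] and Input[bool]. Assumes a running DPF server and the generated
# op.inputs accessors; "time" is only an example label.
from ansys.dpf.core.fields_container import FieldsContainer
from ansys.dpf.core.operators.utility.remove_unnecessary_labels import (
    remove_unnecessary_labels,
)

fc = FieldsContainer()
fc.add_label("time")

op = remove_unnecessary_labels()
op.inputs.fields_container.connect(fc)     # matches Input[FieldsContainer]
op.inputs.label_to_remove.connect("time")  # matches Input[str]
op.inputs.permissive.connect(True)         # matches Input[bool]
# stripped = op.outputs.fields_container()  # would evaluate the operator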
@@ -269,13 +273,13 @@ class OutputsRemoveUnnecessaryLabels(_Outputs): def __init__(self, op: Operator): super().__init__(remove_unnecessary_labels._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( remove_unnecessary_labels._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/scalars_to_field.py b/src/ansys/dpf/core/operators/utility/scalars_to_field.py index 86db9ba9ef0..cdb7e9dbdef 100644 --- a/src/ansys/dpf/core/operators/utility/scalars_to_field.py +++ b/src/ansys/dpf/core/operators/utility/scalars_to_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.scoping import Scoping + class scalars_to_field(Operator): r"""Create scalar or vector Field. @@ -226,23 +231,31 @@ class InputsScalarsToField(_Inputs): def __init__(self, op: Operator): super().__init__(scalars_to_field._spec().inputs, op) - self._double_or_vector_double = Input( + self._double_or_vector_double: Input[float] = Input( scalars_to_field._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._double_or_vector_double) - self._unit = Input(scalars_to_field._spec().input_pin(1), 1, op, -1) + self._unit: Input[str] = Input(scalars_to_field._spec().input_pin(1), 1, op, -1) self._inputs.append(self._unit) - self._location = Input(scalars_to_field._spec().input_pin(2), 2, op, -1) + self._location: Input[str] = Input( + scalars_to_field._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._location) - self._num_entity = Input(scalars_to_field._spec().input_pin(3), 3, op, -1) + self._num_entity: Input[int] = Input( + scalars_to_field._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._num_entity) - self._num_comp = Input(scalars_to_field._spec().input_pin(4), 4, op, -1) + self._num_comp: Input[int] = Input( + scalars_to_field._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._num_comp) - self._scoping = Input(scalars_to_field._spec().input_pin(5), 5, op, -1) + self._scoping: Input[Scoping] = Input( + scalars_to_field._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._scoping) @property - def double_or_vector_double(self) -> Input: + def double_or_vector_double(self) -> Input[float]: r"""Allows to connect double_or_vector_double input to the operator. Data of the field, default is 0-field. Specify a double to have a field of same value or specify directly the data vector. @@ -263,7 +276,7 @@ def double_or_vector_double(self) -> Input: return self._double_or_vector_double @property - def unit(self) -> Input: + def unit(self) -> Input[str]: r"""Allows to connect unit input to the operator. Unit symbol (m, Hz, kg, ...) @@ -284,7 +297,7 @@ def unit(self) -> Input: return self._unit @property - def location(self) -> Input: + def location(self) -> Input[str]: r"""Allows to connect location input to the operator. Location of the field ex 'Nodal', 'ElementalNodal', 'Elemental'... Default is 'numeric'. 
@@ -305,7 +318,7 @@ def location(self) -> Input: return self._location @property - def num_entity(self) -> Input: + def num_entity(self) -> Input[int]: r"""Allows to connect num_entity input to the operator. Number of field entities. Default is 1 or the size of the scoping in input if specified. @@ -326,7 +339,7 @@ def num_entity(self) -> Input: return self._num_entity @property - def num_comp(self) -> Input: + def num_comp(self) -> Input[int]: r"""Allows to connect num_comp input to the operator. Number of field components. Default is 1. @@ -347,7 +360,7 @@ def num_comp(self) -> Input: return self._num_comp @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Scoping. @@ -382,11 +395,13 @@ class OutputsScalarsToField(_Outputs): def __init__(self, op: Operator): super().__init__(scalars_to_field._spec().outputs, op) - self._field = Output(scalars_to_field._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + scalars_to_field._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/server_path.py b/src/ansys/dpf/core/operators/utility/server_path.py index 29e9fc1fcdb..8210fd05f7b 100644 --- a/src/ansys/dpf/core/operators/utility/server_path.py +++ b/src/ansys/dpf/core/operators/utility/server_path.py @@ -142,11 +142,11 @@ class InputsServerPath(_Inputs): def __init__(self, op: Operator): super().__init__(server_path._spec().inputs, op) - self._subpath = Input(server_path._spec().input_pin(0), 0, op, -1) + self._subpath: Input[int] = Input(server_path._spec().input_pin(0), 0, op, -1) self._inputs.append(self._subpath) @property - def subpath(self) -> Input: + def subpath(self) -> Input[int]: r"""Allows to connect subpath input to the operator. Subpath of the Dpf server. Supported values: 0 (default): root of the server, 1: "dpf/bin/platform", 2: "aisol/bin(dll)/platform", 3: "dpf/plugins", 4: "dpf/workflows". @@ -181,11 +181,11 @@ class OutputsServerPath(_Outputs): def __init__(self, op: Operator): super().__init__(server_path._spec().outputs, op) - self._path = Output(server_path._spec().output_pin(0), 0, op) + self._path: Output[str] = Output(server_path._spec().output_pin(0), 0, op) self._outputs.append(self._path) @property - def path(self) -> Output: + def path(self) -> Output[str]: r"""Allows to get path output of the operator Path to the requested folder in the Dpf server diff --git a/src/ansys/dpf/core/operators/utility/set_attribute.py b/src/ansys/dpf/core/operators/utility/set_attribute.py index 919f1dde992..5302e1bc4b0 100644 --- a/src/ansys/dpf/core/operators/utility/set_attribute.py +++ b/src/ansys/dpf/core/operators/utility/set_attribute.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class set_attribute(Operator): r"""Uses the FieldsContainer APIs to modify it. 
@@ -182,15 +186,21 @@ class InputsSetAttribute(_Inputs): def __init__(self, op: Operator): super().__init__(set_attribute._spec().inputs, op) - self._fields_container = Input(set_attribute._spec().input_pin(0), 0, op, -1) + self._fields_container: Input[FieldsContainer] = Input( + set_attribute._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._property_name = Input(set_attribute._spec().input_pin(1), 1, op, -1) + self._property_name: Input[str] = Input( + set_attribute._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._property_name) - self._property_identifier = Input(set_attribute._spec().input_pin(2), 2, op, -1) + self._property_identifier: Input[dict] = Input( + set_attribute._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._property_identifier) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -209,7 +219,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. Supported property names are: "labels". @@ -230,7 +240,7 @@ def property_name(self) -> Input: return self._property_name @property - def property_identifier(self) -> Input: + def property_identifier(self) -> Input[dict]: r"""Allows to connect property_identifier input to the operator. Value of the property to be set : vector of string or LabelSpace for "labels". @@ -265,11 +275,13 @@ class OutputsSetAttribute(_Outputs): def __init__(self, op: Operator): super().__init__(set_attribute._spec().outputs, op) - self._fields_container = Output(set_attribute._spec().output_pin(0), 0, op) + self._fields_container: Output[FieldsContainer] = Output( + set_attribute._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns the modified FieldsContainer. diff --git a/src/ansys/dpf/core/operators/utility/set_property.py b/src/ansys/dpf/core/operators/utility/set_property.py index 1ded8946f40..29e06202cb6 100644 --- a/src/ansys/dpf/core/operators/utility/set_property.py +++ b/src/ansys/dpf/core/operators/utility/set_property.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -15,6 +16,13 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.data_tree import DataTree + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.scoping import Scoping + from ansys.dpf.core.time_freq_support import TimeFreqSupport + class set_property(Operator): r"""Sets a property to an input field/field container. A Fieldin pin 0, a @@ -28,7 +36,7 @@ class set_property(Operator): property_name: str Property to set. Accepted inputs are specific strings namely: 'unit', 'name', 'time_freq_support', 'scoping', 'header'. property: str or TimeFreqSupport or Scoping or DataTree or int or - float + float Property Value to set. Accepted inputs on this pin are: CTimeFreqSupport, CScoping, DataTree, int, double, string. 
Outputs @@ -186,15 +194,21 @@ class InputsSetProperty(_Inputs): def __init__(self, op: Operator): super().__init__(set_property._spec().inputs, op) - self._field = Input(set_property._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + set_property._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._property_name = Input(set_property._spec().input_pin(1), 1, op, -1) + self._property_name: Input[str] = Input( + set_property._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._property_name) - self._property = Input(set_property._spec().input_pin(2), 2, op, -1) + self._property: Input[ + str | TimeFreqSupport | Scoping | DataTree | int | float + ] = Input(set_property._spec().input_pin(2), 2, op, -1) self._inputs.append(self._property) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. Returns @@ -213,7 +227,7 @@ def field(self) -> Input: return self._field @property - def property_name(self) -> Input: + def property_name(self) -> Input[str]: r"""Allows to connect property_name input to the operator. Property to set. Accepted inputs are specific strings namely: 'unit', 'name', 'time_freq_support', 'scoping', 'header'. @@ -234,7 +248,9 @@ def property_name(self) -> Input: return self._property_name @property - def property(self) -> Input: + def property( + self, + ) -> Input[str | TimeFreqSupport | Scoping | DataTree | int | float]: r"""Allows to connect property input to the operator. Property Value to set. Accepted inputs on this pin are: CTimeFreqSupport, CScoping, DataTree, int, double, string. diff --git a/src/ansys/dpf/core/operators/utility/split_in_for_each_range.py b/src/ansys/dpf/core/operators/utility/split_in_for_each_range.py index 63040b3ffde..a6d250dfe4e 100644 --- a/src/ansys/dpf/core/operators/utility/split_in_for_each_range.py +++ b/src/ansys/dpf/core/operators/utility/split_in_for_each_range.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.dpf_operator import Operator + from ansys.dpf.core.scoping import Scoping + class split_in_for_each_range(Operator): r"""Split a scoping into several pieces so you can iterate it with a @@ -208,17 +213,23 @@ class InputsSplitInForEachRange(_Inputs): def __init__(self, op: Operator): super().__init__(split_in_for_each_range._spec().inputs, op) - self._iterable = Input(split_in_for_each_range._spec().input_pin(0), 0, op, -1) + self._iterable: Input = Input( + split_in_for_each_range._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._iterable) - self._operator_to_iterate = Input( + self._operator_to_iterate: Input[Operator] = Input( split_in_for_each_range._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._operator_to_iterate) - self._pin_index = Input(split_in_for_each_range._spec().input_pin(2), 2, op, -1) + self._pin_index: Input[int] = Input( + split_in_for_each_range._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._pin_index) - self._scoping = Input(split_in_for_each_range._spec().input_pin(3), 3, op, -1) + self._scoping: Input[Scoping] = Input( + split_in_for_each_range._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._scoping) - self._chunk_size = Input( + 
self._chunk_size: Input[int] = Input( split_in_for_each_range._spec().input_pin(4), 4, op, -1 ) self._inputs.append(self._chunk_size) @@ -245,7 +256,7 @@ def iterable(self) -> Input: return self._iterable @property - def operator_to_iterate(self) -> Input: + def operator_to_iterate(self) -> Input[Operator]: r"""Allows to connect operator_to_iterate input to the operator. Operator that must be reconnected with the range values. @@ -266,7 +277,7 @@ def operator_to_iterate(self) -> Input: return self._operator_to_iterate @property - def pin_index(self) -> Input: + def pin_index(self) -> Input[int]: r"""Allows to connect pin_index input to the operator. Returns @@ -285,7 +296,7 @@ def pin_index(self) -> Input: return self._pin_index @property - def scoping(self) -> Input: + def scoping(self) -> Input[Scoping]: r"""Allows to connect scoping input to the operator. Returns @@ -304,7 +315,7 @@ def scoping(self) -> Input: return self._scoping @property - def chunk_size(self) -> Input: + def chunk_size(self) -> Input[int]: r"""Allows to connect chunk_size input to the operator. Returns @@ -337,7 +348,9 @@ class OutputsSplitInForEachRange(_Outputs): def __init__(self, op: Operator): super().__init__(split_in_for_each_range._spec().outputs, op) - self._output = Output(split_in_for_each_range._spec().output_pin(0), 0, op) + self._output: Output = Output( + split_in_for_each_range._spec().output_pin(0), 0, op + ) self._outputs.append(self._output) @property diff --git a/src/ansys/dpf/core/operators/utility/strain_from_voigt.py b/src/ansys/dpf/core/operators/utility/strain_from_voigt.py index ba6ffa366bf..21ce31d3f2a 100644 --- a/src/ansys/dpf/core/operators/utility/strain_from_voigt.py +++ b/src/ansys/dpf/core/operators/utility/strain_from_voigt.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class strain_from_voigt(Operator): r"""Converts the strain field from Voigt notation into standard format. @@ -141,11 +146,13 @@ class InputsStrainFromVoigt(_Inputs): def __init__(self, op: Operator): super().__init__(strain_from_voigt._spec().inputs, op) - self._field = Input(strain_from_voigt._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + strain_from_voigt._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. 
field or fields container with only one field is expected @@ -180,11 +187,13 @@ class OutputsStrainFromVoigt(_Outputs): def __init__(self, op: Operator): super().__init__(strain_from_voigt._spec().outputs, op) - self._field = Output(strain_from_voigt._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output( + strain_from_voigt._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/strain_from_voigt_fc.py b/src/ansys/dpf/core/operators/utility/strain_from_voigt_fc.py index a6782bfdda7..8805c81fff3 100644 --- a/src/ansys/dpf/core/operators/utility/strain_from_voigt_fc.py +++ b/src/ansys/dpf/core/operators/utility/strain_from_voigt_fc.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,9 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + class strain_from_voigt_fc(Operator): r"""Converts the strain field from Voigt notation into standard format. @@ -141,13 +145,13 @@ class InputsStrainFromVoigtFc(_Inputs): def __init__(self, op: Operator): super().__init__(strain_from_voigt_fc._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( strain_from_voigt_fc._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. field or fields container with only one field is expected @@ -182,13 +186,13 @@ class OutputsStrainFromVoigtFc(_Outputs): def __init__(self, op: Operator): super().__init__(strain_from_voigt_fc._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( strain_from_voigt_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/txt_file_to_dpf.py b/src/ansys/dpf/core/operators/utility/txt_file_to_dpf.py index 94d83cffd00..d5ead775e7d 100644 --- a/src/ansys/dpf/core/operators/utility/txt_file_to_dpf.py +++ b/src/ansys/dpf/core/operators/utility/txt_file_to_dpf.py @@ -149,11 +149,13 @@ class InputsTxtFileToDpf(_Inputs): def __init__(self, op: Operator): super().__init__(txt_file_to_dpf._spec().inputs, op) - self._input_string = Input(txt_file_to_dpf._spec().input_pin(0), 0, op, -1) + self._input_string: Input[str] = Input( + txt_file_to_dpf._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._input_string) @property - def input_string(self) -> Input: + def input_string(self) -> Input[str]: r"""Allows to connect input_string input to the operator. 
ex: 'double:1.0', 'int:1', 'vector:1.0;1.0' @@ -189,9 +191,9 @@ class OutputsTxtFileToDpf(_Outputs): def __init__(self, op: Operator): super().__init__(txt_file_to_dpf._spec().outputs, op) - self._any_output1 = Output(txt_file_to_dpf._spec().output_pin(0), 0, op) + self._any_output1: Output = Output(txt_file_to_dpf._spec().output_pin(0), 0, op) self._outputs.append(self._any_output1) - self._any_output2 = Output(txt_file_to_dpf._spec().output_pin(1), 1, op) + self._any_output2: Output = Output(txt_file_to_dpf._spec().output_pin(1), 1, op) self._outputs.append(self._any_output2) @property diff --git a/src/ansys/dpf/core/operators/utility/unitary_field.py b/src/ansys/dpf/core/operators/utility/unitary_field.py index 7767776bf04..b50571fdbea 100644 --- a/src/ansys/dpf/core/operators/utility/unitary_field.py +++ b/src/ansys/dpf/core/operators/utility/unitary_field.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.field import Field + from ansys.dpf.core.fields_container import FieldsContainer + class unitary_field(Operator): r"""Takes a field and returns another field of scalars in the same location @@ -143,11 +148,13 @@ class InputsUnitaryField(_Inputs): def __init__(self, op: Operator): super().__init__(unitary_field._spec().inputs, op) - self._field = Input(unitary_field._spec().input_pin(0), 0, op, -1) + self._field: Input[Field | FieldsContainer] = Input( + unitary_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) @property - def field(self) -> Input: + def field(self) -> Input[Field | FieldsContainer]: r"""Allows to connect field input to the operator. 
field or fields container with only one field is expected @@ -182,11 +189,11 @@ class OutputsUnitaryField(_Outputs): def __init__(self, op: Operator): super().__init__(unitary_field._spec().outputs, op) - self._field = Output(unitary_field._spec().output_pin(0), 0, op) + self._field: Output[Field] = Output(unitary_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property - def field(self) -> Output: + def field(self) -> Output[Field]: r"""Allows to get field output of the operator Returns diff --git a/src/ansys/dpf/core/operators/utility/weighted_merge_fields_by_label.py b/src/ansys/dpf/core/operators/utility/weighted_merge_fields_by_label.py index e6121787066..0e76ed28c36 100644 --- a/src/ansys/dpf/core/operators/utility/weighted_merge_fields_by_label.py +++ b/src/ansys/dpf/core/operators/utility/weighted_merge_fields_by_label.py @@ -5,6 +5,7 @@ """ from __future__ import annotations +from typing import TYPE_CHECKING from warnings import warn from ansys.dpf.core.dpf_operator import Operator @@ -14,6 +15,10 @@ from ansys.dpf.core.config import Config from ansys.dpf.core.server_types import AnyServerType +if TYPE_CHECKING: + from ansys.dpf.core.fields_container import FieldsContainer + from ansys.dpf.core.property_field import PropertyField + class weighted_merge_fields_by_label(Operator): r"""Performs a weighted merge on fields of a fields container that share the @@ -231,33 +236,33 @@ class InputsWeightedMergeFieldsByLabel(_Inputs): def __init__(self, op: Operator): super().__init__(weighted_merge_fields_by_label._spec().inputs, op) - self._fields_container = Input( + self._fields_container: Input[FieldsContainer] = Input( weighted_merge_fields_by_label._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._label = Input( + self._label: Input[str] = Input( weighted_merge_fields_by_label._spec().input_pin(1), 1, op, -1 ) self._inputs.append(self._label) - self._merged_field_support = Input( + self._merged_field_support: Input = Input( weighted_merge_fields_by_label._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._merged_field_support) - self._sum_merge = Input( + self._sum_merge: Input[bool] = Input( weighted_merge_fields_by_label._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._sum_merge) - self._weights1 = Input( + self._weights1: Input[PropertyField] = Input( weighted_merge_fields_by_label._spec().input_pin(1000), 1000, op, 0 ) self._inputs.append(self._weights1) - self._weights2 = Input( + self._weights2: Input[PropertyField] = Input( weighted_merge_fields_by_label._spec().input_pin(1001), 1001, op, 1 ) self._inputs.append(self._weights2) @property - def fields_container(self) -> Input: + def fields_container(self) -> Input[FieldsContainer]: r"""Allows to connect fields_container input to the operator. Returns @@ -276,7 +281,7 @@ def fields_container(self) -> Input: return self._fields_container @property - def label(self) -> Input: + def label(self) -> Input[str]: r"""Allows to connect label input to the operator. Label identifier that should be merged. @@ -318,7 +323,7 @@ def merged_field_support(self) -> Input: return self._merged_field_support @property - def sum_merge(self) -> Input: + def sum_merge(self) -> Input[bool]: r"""Allows to connect sum_merge input to the operator. Default is false. If true, redundant quantities are summed instead of being ignored. 
@@ -339,7 +344,7 @@ def sum_merge(self) -> Input: return self._sum_merge @property - def weights1(self) -> Input: + def weights1(self) -> Input[PropertyField]: r"""Allows to connect weights1 input to the operator. Weights to apply to each field from pin 1000 to ... @@ -360,7 +365,7 @@ def weights1(self) -> Input: return self._weights1 @property - def weights2(self) -> Input: + def weights2(self) -> Input[PropertyField]: r"""Allows to connect weights2 input to the operator. Weights to apply to each field from pin 1000 to ... @@ -395,13 +400,13 @@ class OutputsWeightedMergeFieldsByLabel(_Outputs): def __init__(self, op: Operator): super().__init__(weighted_merge_fields_by_label._spec().outputs, op) - self._fields_container = Output( + self._fields_container: Output[FieldsContainer] = Output( weighted_merge_fields_by_label._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @property - def fields_container(self) -> Output: + def fields_container(self) -> Output[FieldsContainer]: r"""Allows to get fields_container output of the operator Returns diff --git a/src/ansys/dpf/core/outputs.py b/src/ansys/dpf/core/outputs.py index d1a1ae73e94..3b635ebb4b7 100644 --- a/src/ansys/dpf/core/outputs.py +++ b/src/ansys/dpf/core/outputs.py @@ -23,13 +23,16 @@ """Outputs.""" import re +from typing import Generic, TypeVar from ansys.dpf.core.common import types from ansys.dpf.core.mapping_types import map_types_to_python from ansys.dpf.core.operator_specification import PinSpecification +T = TypeVar("T") -class Output: + +class Output(Generic[T]): """ Intermediate class internally instantiated by the :class:`ansys.dpf.core.dpf_operator.Operator`. @@ -56,7 +59,7 @@ def __init__(self, spec, pin, operator): self._python_expected_types.append(map_types_to_python[cpp_type]) self.aliases = self._spec.aliases - def get_data(self): + def get_data(self) -> T: """Retrieve the output of the operator.""" type_output = self._spec.type_names[0] @@ -98,7 +101,7 @@ def get_data(self): ] return derived_types[0][0](output) - def __call__(self): + def __call__(self) -> T: """Allow instances of the class to be callable for data retrieval purposes.""" return self.get_data()
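# Minimal end-to-end sketch (not part of the patch) of what the generic Output[T]
# enables: OutputsScalarsToField.field is declared as Output[Field], and since
# Output.get_data() and Output.__call__() now return T, a type checker infers `f`
# below as Field without an explicit annotation. Assumes a running DPF server and
# the generated op.inputs / op.outputs accessors.
from ansys.dpf.core.operators.utility.scalars_to_field import scalars_to_field

op = scalars_to_field()
op.inputs.double_or_vector_double.connect(1.0)  # Input[float]
op.inputs.num_entity.connect(3)                 # Input[int]
f = op.outputs.field()  # evaluates the operator; f is inferred as Field
print(f.data)           # scalar field with 3 entities, all set to 1.0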