diff --git a/src/ansys/dpf/core/__init__.py b/src/ansys/dpf/core/__init__.py
index bf590a5a893..c7bd6fe070f 100644
--- a/src/ansys/dpf/core/__init__.py
+++ b/src/ansys/dpf/core/__init__.py
@@ -92,6 +92,7 @@
 from ansys.dpf.core.mesh_info import MeshInfo
 from ansys.dpf.core.generic_data_container import GenericDataContainer
+from ansys.dpf.core.dpf_operator import available_operator_names
 
 # for matplotlib
 # solves "QApplication: invalid style override passed, ignoring it."
diff --git a/src/ansys/dpf/core/_custom_operators_helpers.py b/src/ansys/dpf/core/_custom_operators_helpers.py
index c80d112c2aa..46ad277b4e7 100644
--- a/src/ansys/dpf/core/_custom_operators_helpers.py
+++ b/src/ansys/dpf/core/_custom_operators_helpers.py
@@ -17,7 +17,7 @@
     meshes_container,
     result_info,
     string_field,
-    custom_type_field,
+    custom_type_field, generic_data_container,
 )
 
 external_operator_api = external_operator_capi.ExternalOperatorCAPI
@@ -70,6 +70,10 @@ def __operator_main__(operator_functor, data):
         custom_type_field.CustomTypeField,
         external_operator_api.external_operator_put_out_custom_type_field,
     ),
+    (
+        generic_data_container.GenericDataContainer,
+        external_operator_api.external_operator_put_out_generic_data_container,
+    )
 ]
 
 _type_to_input_method = [
@@ -140,6 +144,11 @@ def __operator_main__(operator_functor, data):
         external_operator_api.external_operator_get_in_data_tree,
         "data_tree",
     ),
+    (
+        generic_data_container.GenericDataContainer,
+        external_operator_api.external_operator_get_in_generic_data_container,
+        "generic_data_container",
+    )
     # TO DO : (dpf_operator.Operator, external_operator_api.external_operator_get_in_operator,
     # "operator"),
 ]
diff --git a/src/ansys/dpf/core/custom_operator.py b/src/ansys/dpf/core/custom_operator.py
index cdfc5e93cd4..e154d03f6c7 100644
--- a/src/ansys/dpf/core/custom_operator.py
+++ b/src/ansys/dpf/core/custom_operator.py
@@ -10,7 +10,9 @@
 import ctypes
 import os
 import pathlib
+import re
 import shutil
+import tempfile
 import warnings
 import zipfile
 
@@ -37,7 +39,9 @@
 from ansys.dpf.gate import object_handler, capi, dpf_vector, integral_types
 
 
-def update_virtual_environment_for_custom_operators(restore_original: bool = False):
+def update_virtual_environment_for_custom_operators(
+    restore_original: bool = False,
+):
     """Updates the dpf-site.zip file used to start a venv for Python custom operators to run in.
 
     It updates the site-packages in dpf-site.zip with the site-packages of the current venv.
@@ -54,6 +58,7 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
     """
     # Get the path to the dpf-site.zip in the current DPF server
    server = dpf.server.get_or_create_server(dpf.SERVER)
+    print(server.ansys_path)
     if server.has_client():
         raise NotImplementedError(
             "Updating the dpf-site.zip of a DPF Server is only available when InProcess."
         )
@@ -71,6 +76,10 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
         else:
             warnings.warn("No original dpf-site.zip found. Current is most likely the original.")
     else:
+        # Store original dpf-site.zip for this DPF Server if no original is stored
+        if not os.path.exists(os.path.dirname(original_dpf_site_zip_path)):
+            os.mkdir(os.path.dirname(original_dpf_site_zip_path))
+            shutil.move(src=current_dpf_site_zip_path, dst=original_dpf_site_zip_path)
         # Get the current paths to site_packages
         import site
         paths_to_current_site_packages = site.getsitepackages()
@@ -83,17 +92,59 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
         if current_site_packages_path is None:
             warnings.warn("Could not find a currently loaded site-packages folder to update from.")
             return
-        # Store original dpf-site.zip for this DPF Server if no original is stored
-        if not os.path.exists(os.path.dirname(original_dpf_site_zip_path)):
-            os.mkdir(os.path.dirname(original_dpf_site_zip_path))
-            shutil.move(src=current_dpf_site_zip_path, dst=original_dpf_site_zip_path)
-        # Zip the current site-packages at the destination
-        with zipfile.ZipFile(current_dpf_site_zip_path, mode="w") as archive:
-            for file_path in current_site_packages_path.rglob("*"):
-                archive.write(
-                    filename=file_path,
-                    arcname=file_path.relative_to(current_site_packages_path)
-                )
+        # If an ansys.dpf.core.path file exists, then the installation is editable
+        path_file = os.path.join(current_site_packages_path, "ansys.dpf.core.pth")
+        if os.path.exists(path_file):
+            # Treat editable installation of ansys-dpf-core
+            with open(path_file, "r") as f:
+                current_site_packages_path = f.readline()
+        with tempfile.TemporaryDirectory() as tmpdir:
+            os.mkdir(os.path.join(tmpdir, "ansys_dpf_core"))
+            ansys_dir = os.path.join(tmpdir, "ansys_dpf_core")
+            os.mkdir(os.path.join(ansys_dir, "ansys"))
+            os.mkdir(os.path.join(ansys_dir, "ansys", "dpf"))
+            os.mkdir(os.path.join(ansys_dir, "ansys", "grpc"))
+            shutil.copytree(
+                src=os.path.join(current_site_packages_path, "ansys", "dpf", "core"),
+                dst=os.path.join(ansys_dir, "ansys", "dpf", "core"),
+                ignore=lambda directory, contents: ["__pycache__", "result_files"],
+            )
+            shutil.copytree(
+                src=os.path.join(current_site_packages_path, "ansys", "dpf", "gate"),
+                dst=os.path.join(ansys_dir, "ansys", "dpf", "gate"),
+                ignore=lambda directory, contents: ["__pycache__"],
+            )
+            shutil.copytree(
+                src=os.path.join(current_site_packages_path, "ansys", "grpc", "dpf"),
+                dst=os.path.join(ansys_dir, "ansys", "grpc", "dpf"),
+                ignore=lambda directory, contents: ["__pycache__"],
+            )
+            # Find the .dist_info folder
+            pattern = re.compile(r'^ansys_dpf_core\S*')
+            for p in pathlib.Path(current_site_packages_path).iterdir():
+                if p.is_dir():
+                    # print(p.stem)
+                    if re.search(pattern, p.stem):
+                        dist_info_path = p
+                        break
+            shutil.copytree(
+                src=dist_info_path,
+                dst=os.path.join(ansys_dir, dist_info_path.name),
+            )
+            # Zip the files as dpf-site.zip
+            base_name = os.path.join(tmpdir, "ansys_dpf_core_zip")
+            base_dir = "."
+            root_dir = os.path.join(tmpdir, "ansys_dpf_core")  # OK
+            shutil.make_archive(base_name=base_name, root_dir=root_dir, base_dir=base_dir, format='zip')
+            # Include files of interest from the original dpf-site.zip and the ansys_dpf_core.zip
+            with zipfile.ZipFile(current_dpf_site_zip_path, "w") as archive:
+                with zipfile.ZipFile(original_dpf_site_zip_path, mode="r") as original:
+                    for item in original.infolist():
+                        if "ansys" not in item.filename:
+                            archive.writestr(item, original.read(item))
+                with zipfile.ZipFile(base_name+'.zip', mode="r") as original:
+                    for item in original.infolist():
+                        archive.writestr(item, original.read(item))
 
 
 def record_operator(operator_type, *args) -> None:
diff --git a/src/ansys/dpf/core/dpf_operator.py b/src/ansys/dpf/core/dpf_operator.py
index ea5e515f8ef..3b8f4ad0edd 100644
--- a/src/ansys/dpf/core/dpf_operator.py
+++ b/src/ansys/dpf/core/dpf_operator.py
@@ -529,17 +529,26 @@ def get_output(self, pin=0, output_type=None):
         for type_tuple in self._type_to_output_method:
             if output_type is type_tuple[0]:
                 if len(type_tuple) >= 3:
+                    internal_obj = type_tuple[1](self, pin)
+                    if internal_obj is None:
+                        self._progress_thread = None
+                        return
                     if isinstance(type_tuple[2], str):
-                        parameters = {type_tuple[2]: type_tuple[1](self, pin)}
+                        parameters = {type_tuple[2]: internal_obj}
                         out = output_type(**parameters, server=self._server)
                     else:
-                        out = type_tuple[2](type_tuple[1](self, pin))
+                        out = type_tuple[2](internal_obj)
                 if out is None:
+                    internal_obj = type_tuple[1](self, pin)
+                    if internal_obj is None:
+                        self._progress_thread = None
+                        return
                     try:
-                        return output_type(type_tuple[1](self, pin), server=self._server)
+                        return output_type(internal_obj, server=self._server)
                     except TypeError:
                         self._progress_thread = None
-                        return output_type(type_tuple[1](self, pin))
+                        return output_type(internal_obj)
+
                 if out is not None:
                     self._progress_thread = None
                     return out
diff --git a/src/ansys/dpf/core/server.py b/src/ansys/dpf/core/server.py
index 31a0f937e6e..65c5b6f170d 100644
--- a/src/ansys/dpf/core/server.py
+++ b/src/ansys/dpf/core/server.py
@@ -11,6 +11,7 @@
 import inspect
 import warnings
 import traceback
+from typing import Union
 
 from ansys import dpf
@@ -22,7 +23,7 @@
     ServerFactory,
     CommunicationProtocols,
 )
-from ansys.dpf.core.server_types import DPF_DEFAULT_PORT, LOCALHOST, RUNNING_DOCKER
+from ansys.dpf.core.server_types import DPF_DEFAULT_PORT, LOCALHOST, RUNNING_DOCKER, BaseServer
 from ansys.dpf.core import server_context
 
@@ -382,7 +383,7 @@ def connect():
         raise e
 
 
-def get_or_create_server(server):
+def get_or_create_server(server: BaseServer) -> Union[BaseServer, None]:
     """Returns the given server or if None, creates a new one.
 
     Parameters
diff --git a/src/ansys/dpf/core/server_types.py b/src/ansys/dpf/core/server_types.py
index 8bf6a505c03..9f966391c98 100644
--- a/src/ansys/dpf/core/server_types.py
+++ b/src/ansys/dpf/core/server_types.py
@@ -416,7 +416,7 @@ def available_api_types(self):
         pass
 
     @abc.abstractmethod
-    def get_api_for_type(self, c_api, grpc_api):
+    def get_api_for_type(self, capi, grpcapi):
         pass
 
     @property
diff --git a/tests/test_operator.py b/tests/test_operator.py
index 24a0fa222e6..0c45d47ca67 100644
--- a/tests/test_operator.py
+++ b/tests/test_operator.py
@@ -113,6 +113,7 @@ def test_connect_get_out_all_types_operator(server_type):
         dpf.core.TimeFreqSupport(server=server_type),
         dpf.core.Workflow(server=server_type),
         dpf.core.DataTree(server=server_type),
+        # dpf.core.GenericDataContainer(server=server_type),  # Fails for LegacyGrpc
         dpf.core.StringField(server=server_type),
         dpf.core.CustomTypeField(np.float64, server=server_type),
     ]
@@ -261,18 +262,6 @@ def test_connect_operator_output_operator(server_type):
     assert len(fOut.data) == 3
 
 
-@pytest.mark.skipif(
-    not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0,
-    reason="Connect an operator as an input is supported starting server version 7.0",
-)
-def test_connect_generic_data_container_operator(server_type):
-    op = dpf.core.Operator("forward", server=server_type)
-    inpt = dpf.core.GenericDataContainer(server=server_type)
-    op.connect(0, inpt)
-    output = op.get_output(0, dpf.core.types.generic_data_container)
-    assert output is not None
-
-
 @pytest.mark.skipif(
     not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_6_2,
     reason="Connect an operator as an input is supported starting server version 6.2",
 )
@@ -632,19 +621,19 @@ def test_connect_model(plate_msup, server_type):
     assert np.allclose(fc[0].data[0], [5.12304110e-14, 3.64308310e-04, 5.79805917e-06])
 
 
-def test_operator_several_output_types(plate_msup, server_type):
-    inpt = dpf.core.Field(nentities=3, server=server_type)
+def test_operator_several_output_types_remote(plate_msup, server_type_remote_process):
+    inpt = dpf.core.Field(nentities=3, server=server_type_remote_process)
     inpt.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
     inpt.scoping.ids = [1, 2, 3]
     inpt.unit = "m"
-    op = dpf.core.Operator("unit_convert", server=server_type)
+    op = dpf.core.Operator("unit_convert", server=server_type_remote_process)
     op.inputs.entity_to_convert(inpt)
     op.inputs.unit_name("mm")
     f = op.outputs.converted_entity_as_field()
     assert f.unit == "mm"
     assert np.allclose(f.data.flatten("C"), np.array([1, 2, 3, 4, 5, 6, 7, 8, 9]) * 1000)
 
-    model = dpf.core.Model(plate_msup, server=server_type)
+    model = dpf.core.Model(plate_msup, server=server_type_remote_process)
     din = model.metadata.meshed_region.nodes.coordinates_field.data
 
     assert model.metadata.meshed_region.nodes.coordinates_field.unit == "m"
@@ -657,26 +646,6 @@ def test_operator_several_output_types(plate_msup, server_type):
     assert np.allclose(m.nodes.coordinates_field.data, np.array(din) * 1000)
 
 
-def test_operator_several_output_types2(server_type):
-    inpt = dpf.core.Field(nentities=3, server=server_type)
-    inpt.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
-    inpt.scoping.ids = [1, 2, 3]
-    inpt.unit = "m"
-    uc = dpf.core.Operator("Rescope", server=server_type)
-    uc.inputs.fields(inpt)
-    uc.inputs.mesh_scoping(dpf.core.Scoping(ids=[1, 2]))
-    f = uc.outputs.fields_as_field()
-    assert np.allclose(f.data.flatten("C"), [1, 2, 3, 4, 5, 6])
-
-    fc = dpf.core.FieldsContainer(server=server_type)
-    fc.labels = ["time"]
-    fc.add_field({"time": 1}, inpt)
-
-    uc.inputs.fields(fc)
-    fc2 = uc.outputs.fields_as_fields_container()
-    assert np.allclose(fc2[0].data.flatten("C"), [1, 2, 3, 4, 5, 6])
-
-
 def test_create_operator_config(server_type):
     conf = dpf.core.Config(server=server_type)
     assert conf.config_option_documentation("mutex") == ""
@@ -865,14 +834,23 @@ def test_connect_get_output_double_list_operator(server_type):
 
 
 @conftest.raises_for_servers_version_under("4.0")
-def test_connect_get_output_data_tree_operator():
-    d = dpf.core.DataTree({"name": "Paul"})
-    op = dpf.core.operators.utility.forward(d)
+def test_connect_get_output_data_tree_operator(server_type):
+    d = dpf.core.DataTree({"name": "Paul"}, server=server_type)
+    op = dpf.core.operators.utility.forward(d, server=server_type)
     d_out = op.get_output(0, dpf.core.types.data_tree)
     assert d_out.get_as("name") == "Paul"
 
 
-def test_operator_several_output_types(plate_msup):
+@conftest.raises_for_servers_version_under("7.0")
+def test_connect_get_output_generic_data_container_operator(server_clayer):
+    gdc = dpf.core.GenericDataContainer(server=server_clayer)
+    gdc.set_property("n", 1)
+    op = dpf.core.operators.utility.forward(gdc, server=server_clayer)
+    gdc_out = op.get_output(0, dpf.core.types.generic_data_container)
+    assert gdc_out.get_property("n") == 1
+
+
+def test_operator_several_output_types_copy(plate_msup):
     inpt = dpf.core.Field(nentities=3)
     inpt.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
     inpt.scoping.ids = [1, 2, 3]
diff --git a/tests/test_python_plugins.py b/tests/test_python_plugins.py
index 714145cbe6a..ca25b705828 100644
--- a/tests/test_python_plugins.py
+++ b/tests/test_python_plugins.py
@@ -5,6 +5,7 @@
 from conftest import SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_4_0
 from ansys.dpf import core as dpf
 import conftest
+from ansys.dpf.core.custom_operator import update_virtual_environment_for_custom_operators
 from ansys.dpf.core.errors import DPFServerException
 from ansys.dpf.core.operator_specification import (
     CustomSpecification,
@@ -26,6 +27,9 @@
 if platform.system() == "Linux":
     pytest.skip("Known failures for the Ubuntu-latest GitHub pipelines", allow_module_level=True)
 
+update_virtual_environment_for_custom_operators(restore_original=True)
+update_virtual_environment_for_custom_operators()
+
 
 @pytest.fixture(scope="module")
 def load_all_types_plugin(testfiles_dir):
@@ -200,6 +204,18 @@ def test_data_tree(server_type_remote_process, testfiles_dir):
     assert dt.get_as("name") == "Paul"
 
 
+@pytest.mark.skipif(not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0, reason="Requires DPF 7.0")
+def test_generic_data_container(server_clayer_remote_process, testfiles_dir):
+    load_all_types_plugin_with_serv(server_clayer_remote_process, testfiles_dir)
+    gdc = dpf.GenericDataContainer(server=server_clayer_remote_process)
+    gdc.set_property(property_name="n", prop=1)
+    op = dpf.Operator("custom_forward_generic_data_container", server=server_clayer_remote_process)
+    op.connect(0, gdc)
+    gdc2: dpf.GenericDataContainer = op.get_output(0, dpf.types.generic_data_container)
+    assert gdc2 is not None
+    assert gdc2.get_property("n") == 1
+
+
 @conftest.raises_for_servers_version_under("4.0")
 def test_syntax_error(server_type_remote_process, testfiles_dir):
     dpf.load_library(
diff --git a/tests/testfiles/pythonPlugins/all_types/dpf_types_op.py b/tests/testfiles/pythonPlugins/all_types/dpf_types_op.py
index ea7d0fb4c20..0cbdc45f013 100644
--- a/tests/testfiles/pythonPlugins/all_types/dpf_types_op.py
+++ b/tests/testfiles/pythonPlugins/all_types/dpf_types_op.py
@@ -10,6 +10,7 @@
     types,
     workflow,
     data_tree,
+    generic_data_container,
 )
 
@@ -192,3 +193,20 @@ def specification(self):
     @property
     def name(self):
         return "custom_forward_data_tree"
+
+
+class ForwardGenericDataContainerOperator(CustomOperatorBase):
+    def run(self):
+        f = self.get_input(0, generic_data_container.GenericDataContainer)
+        assert not f is None
+        f = self.get_input(0, types.generic_data_container)
+        self.set_output(0, f)
+        self.set_succeeded()
+
+    @property
+    def specification(self):
+        return None
+
+    @property
+    def name(self):
+        return "custom_forward_generic_data_container"
diff --git a/tests/testfiles/pythonPlugins/all_types/load_operators_func.py b/tests/testfiles/pythonPlugins/all_types/load_operators_func.py
index 0a35e8461c6..c99a95edf2c 100644
--- a/tests/testfiles/pythonPlugins/all_types/load_operators_func.py
+++ b/tests/testfiles/pythonPlugins/all_types/load_operators_func.py
@@ -23,3 +23,4 @@ def load_operators(*args):
     record_operator(dpf_types_op.ForwardMeshesContainerOperator, *args)
     record_operator(dpf_types_op.ForwardWorkflowOperator, *args)
     record_operator(dpf_types_op.ForwardDataTreeOperator, *args)
+    record_operator(dpf_types_op.ForwardGenericDataContainerOperator, *args)
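
Usage sketch, not part of the patch itself: a minimal example mirroring the tests added above. It assumes an active DPF server of version 7.0 or later with a non-LegacyGrpc configuration (the new tests skip LegacyGrpc for GenericDataContainer) and uses only API shown or exercised in this diff.

    from ansys.dpf import core as dpf

    # Round-trip a GenericDataContainer through the "forward" operator,
    # as the new tests do, using the default/global server.
    gdc = dpf.GenericDataContainer()
    gdc.set_property("n", 1)
    op = dpf.Operator("forward")
    op.connect(0, gdc)
    gdc_out = op.get_output(0, dpf.types.generic_data_container)
    assert gdc_out.get_property("n") == 1

    # available_operator_names is now re-exported by ansys.dpf.core (first hunk of this patch)
    print(len(dpf.available_operator_names()))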