Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
ceb7ba8
Make available_operator_names available at root level
PProfizi Jan 8, 2024
f5f9ad8
Add typehint to get_or_create_server
PProfizi Jan 8, 2024
9d008bf
Fix get_api_for_type argument naming
PProfizi Jan 8, 2024
7f7e3f1
Allow GenericDataContainer as output of custom_operator
PProfizi Jan 9, 2024
2e1ad5f
Add test and add GDC as input. Waiting for dpf-site.zip update feature
PProfizi Jan 25, 2024
950780e
Merge branch 'master' into fix/custom_plugin_fixes
PProfizi Jan 25, 2024
382528e
Change update of dpf-site.zip to only change ansys-dpf-core (WIP)
PProfizi Jan 25, 2024
1825753
Update of dpf-site.zip works for non-editable install of ansys-dpf-core
PProfizi Jan 25, 2024
0168154
Use update of dpf-site.zip in test_python_plugins.py
PProfizi Jan 25, 2024
248e988
Fix type_to_input method for generic_data_container in _custom_operat…
PProfizi Jan 26, 2024
34e00a2
Working dpf-site.zip update when ansys-dpf-core installed non-editable
PProfizi Jan 26, 2024
842eeeb
Working update of dpf-site.zip for both editable and non-editable ins…
PProfizi Jan 26, 2024
884e5ec
Merge branch 'master' into fix/custom_plugin_fixes
PProfizi Jan 26, 2024
f85a7ab
Skip test for DPF<7.0
PProfizi Jan 26, 2024
2db78ed
Merge remote-tracking branch 'origin/fix/custom_plugin_fixes' into fi…
PProfizi Jan 26, 2024
8ca296c
Add test_set_get_int_generic_data_container
PProfizi Jan 26, 2024
5219ccc
Revert "Add test_set_get_int_generic_data_container"
PProfizi Jan 26, 2024
0ec64b6
Change operator.get_output to return None when response from server i…
PProfizi Jan 26, 2024
7e11a47
Skip LegacyGrpc in tests on GenericDataContainer as output of an Oper…
PProfizi Jan 26, 2024
36bdaa2
Remove duplicate
PProfizi Jan 26, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions src/ansys/dpf/core/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@
from ansys.dpf.core.mesh_info import MeshInfo
from ansys.dpf.core.generic_data_container import GenericDataContainer

from ansys.dpf.core.dpf_operator import available_operator_names

# for matplotlib
# solves "QApplication: invalid style override passed, ignoring it."
Expand Down
11 changes: 10 additions & 1 deletion src/ansys/dpf/core/_custom_operators_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
meshes_container,
result_info,
string_field,
custom_type_field,
custom_type_field, generic_data_container,
)

external_operator_api = external_operator_capi.ExternalOperatorCAPI
Expand Down Expand Up @@ -70,6 +70,10 @@ def __operator_main__(operator_functor, data):
custom_type_field.CustomTypeField,
external_operator_api.external_operator_put_out_custom_type_field,
),
(
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@PProfizi is it tested somewhere else?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@cbellot000 CodeCov says yes, which is weird because it is not tested explicitly.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@cbellot000 BTW why do we have a type_to_output specific to the custom operators?

Copy link
Contributor Author

@PProfizi PProfizi Jan 25, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@cbellot000 I just realized that to be able to test this (and for this to even work), the changes need to be propagated to dpf-site.zip.
So either I add the capability to update the dpf-site.zip based on the current venv as we discussed previously, and use it at the start of test_python_plugins.py,
or we have to merge it first, then wait for a server sync loop to put it in dpf-site.zip and only then add the tests.
I'd tend to go towards the first proposal, which means I have to code and merge a PR with the dpf-site.zip update feature before this one.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

See #1379

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I see then. You could maybe add the test and mark it as skipped; this way we can remove the skip later?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@cbellot000 BTW why do we have a type_to_output specific to the custom operators?

I don't understand, where would you put "external_operator_put_out_generic_data_container" instead?

generic_data_container.GenericDataContainer,
external_operator_api.external_operator_put_out_generic_data_container,
)
]

_type_to_input_method = [
Expand Down Expand Up @@ -140,6 +144,11 @@ def __operator_main__(operator_functor, data):
external_operator_api.external_operator_get_in_data_tree,
"data_tree",
),
(
generic_data_container.GenericDataContainer,
external_operator_api.external_operator_get_in_generic_data_container,
"generic_data_container",
)
# TO DO : (dpf_operator.Operator, external_operator_api.external_operator_get_in_operator,
# "operator"),
]
75 changes: 63 additions & 12 deletions src/ansys/dpf/core/custom_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,9 @@
import ctypes
import os
import pathlib
import re
import shutil
import tempfile
import warnings
import zipfile

Expand All @@ -37,7 +39,9 @@
from ansys.dpf.gate import object_handler, capi, dpf_vector, integral_types


def update_virtual_environment_for_custom_operators(restore_original: bool = False):
def update_virtual_environment_for_custom_operators(
restore_original: bool = False,
):
"""Updates the dpf-site.zip file used to start a venv for Python custom operators to run in.

It updates the site-packages in dpf-site.zip with the site-packages of the current venv.
Expand All @@ -54,6 +58,7 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
"""
# Get the path to the dpf-site.zip in the current DPF server
server = dpf.server.get_or_create_server(dpf.SERVER)
print(server.ansys_path)
if server.has_client():
raise NotImplementedError(
"Updating the dpf-site.zip of a DPF Server is only available when InProcess."
Expand All @@ -71,6 +76,10 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
else:
warnings.warn("No original dpf-site.zip found. Current is most likely the original.")
else:
# Store original dpf-site.zip for this DPF Server if no original is stored
if not os.path.exists(os.path.dirname(original_dpf_site_zip_path)):
os.mkdir(os.path.dirname(original_dpf_site_zip_path))
shutil.move(src=current_dpf_site_zip_path, dst=original_dpf_site_zip_path)
# Get the current paths to site_packages
import site
paths_to_current_site_packages = site.getsitepackages()
Expand All @@ -83,17 +92,59 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
if current_site_packages_path is None:
warnings.warn("Could not find a currently loaded site-packages folder to update from.")
return
# Store original dpf-site.zip for this DPF Server if no original is stored
if not os.path.exists(os.path.dirname(original_dpf_site_zip_path)):
os.mkdir(os.path.dirname(original_dpf_site_zip_path))
shutil.move(src=current_dpf_site_zip_path, dst=original_dpf_site_zip_path)
# Zip the current site-packages at the destination
with zipfile.ZipFile(current_dpf_site_zip_path, mode="w") as archive:
for file_path in current_site_packages_path.rglob("*"):
archive.write(
filename=file_path,
arcname=file_path.relative_to(current_site_packages_path)
)
# If an ansys.dpf.core.path file exists, then the installation is editable
path_file = os.path.join(current_site_packages_path, "ansys.dpf.core.pth")
if os.path.exists(path_file):
# Treat editable installation of ansys-dpf-core
with open(path_file, "r") as f:
current_site_packages_path = f.readline()
with tempfile.TemporaryDirectory() as tmpdir:
os.mkdir(os.path.join(tmpdir, "ansys_dpf_core"))
ansys_dir = os.path.join(tmpdir, "ansys_dpf_core")
os.mkdir(os.path.join(ansys_dir, "ansys"))
os.mkdir(os.path.join(ansys_dir, "ansys", "dpf"))
os.mkdir(os.path.join(ansys_dir, "ansys", "grpc"))
shutil.copytree(
src=os.path.join(current_site_packages_path, "ansys", "dpf", "core"),
dst=os.path.join(ansys_dir, "ansys", "dpf", "core"),
ignore=lambda directory, contents: ["__pycache__", "result_files"],
)
shutil.copytree(
src=os.path.join(current_site_packages_path, "ansys", "dpf", "gate"),
dst=os.path.join(ansys_dir, "ansys", "dpf", "gate"),
ignore=lambda directory, contents: ["__pycache__"],
)
shutil.copytree(
src=os.path.join(current_site_packages_path, "ansys", "grpc", "dpf"),
dst=os.path.join(ansys_dir, "ansys", "grpc", "dpf"),
ignore=lambda directory, contents: ["__pycache__"],
)
# Find the .dist_info folder
pattern = re.compile(r'^ansys_dpf_core\S*')
for p in pathlib.Path(current_site_packages_path).iterdir():
if p.is_dir():
# print(p.stem)
if re.search(pattern, p.stem):
dist_info_path = p
break
shutil.copytree(
src=dist_info_path,
dst=os.path.join(ansys_dir, dist_info_path.name),
)
# Zip the files as dpf-site.zip
base_name = os.path.join(tmpdir, "ansys_dpf_core_zip")
base_dir = "."
root_dir = os.path.join(tmpdir, "ansys_dpf_core") # OK
shutil.make_archive(base_name=base_name, root_dir=root_dir, base_dir=base_dir, format='zip')
# Include files of interest from the original dpf-site.zip and the ansys_dpf_core.zip
with zipfile.ZipFile(current_dpf_site_zip_path, "w") as archive:
with zipfile.ZipFile(original_dpf_site_zip_path, mode="r") as original:
for item in original.infolist():
if "ansys" not in item.filename:
archive.writestr(item, original.read(item))
with zipfile.ZipFile(base_name+'.zip', mode="r") as original:
for item in original.infolist():
archive.writestr(item, original.read(item))


def record_operator(operator_type, *args) -> None:
Expand Down
17 changes: 13 additions & 4 deletions src/ansys/dpf/core/dpf_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -529,17 +529,26 @@ def get_output(self, pin=0, output_type=None):
for type_tuple in self._type_to_output_method:
if output_type is type_tuple[0]:
if len(type_tuple) >= 3:
internal_obj = type_tuple[1](self, pin)
if internal_obj is None:
self._progress_thread = None
return
if isinstance(type_tuple[2], str):
parameters = {type_tuple[2]: type_tuple[1](self, pin)}
parameters = {type_tuple[2]: internal_obj}
out = output_type(**parameters, server=self._server)
else:
out = type_tuple[2](type_tuple[1](self, pin))
out = type_tuple[2](internal_obj)
if out is None:
internal_obj = type_tuple[1](self, pin)
if internal_obj is None:
self._progress_thread = None
return
try:
return output_type(type_tuple[1](self, pin), server=self._server)
return output_type(internal_obj, server=self._server)
except TypeError:
self._progress_thread = None
return output_type(type_tuple[1](self, pin))
return output_type(internal_obj)

if out is not None:
self._progress_thread = None
return out
Expand Down
5 changes: 3 additions & 2 deletions src/ansys/dpf/core/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import inspect
import warnings
import traceback
from typing import Union

from ansys import dpf

Expand All @@ -22,7 +23,7 @@
ServerFactory,
CommunicationProtocols,
)
from ansys.dpf.core.server_types import DPF_DEFAULT_PORT, LOCALHOST, RUNNING_DOCKER
from ansys.dpf.core.server_types import DPF_DEFAULT_PORT, LOCALHOST, RUNNING_DOCKER, BaseServer
from ansys.dpf.core import server_context


Expand Down Expand Up @@ -382,7 +383,7 @@ def connect():
raise e


def get_or_create_server(server):
def get_or_create_server(server: BaseServer) -> Union[BaseServer, None]:
"""Returns the given server or if None, creates a new one.

Parameters
Expand Down
2 changes: 1 addition & 1 deletion src/ansys/dpf/core/server_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -416,7 +416,7 @@ def available_api_types(self):
pass

@abc.abstractmethod
def get_api_for_type(self, c_api, grpc_api):
def get_api_for_type(self, capi, grpcapi):
pass

@property
Expand Down
58 changes: 18 additions & 40 deletions tests/test_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,7 @@ def test_connect_get_out_all_types_operator(server_type):
dpf.core.TimeFreqSupport(server=server_type),
dpf.core.Workflow(server=server_type),
dpf.core.DataTree(server=server_type),
# dpf.core.GenericDataContainer(server=server_type), # Fails for LegacyGrpc
dpf.core.StringField(server=server_type),
dpf.core.CustomTypeField(np.float64, server=server_type),
]
Expand Down Expand Up @@ -261,18 +262,6 @@ def test_connect_operator_output_operator(server_type):
assert len(fOut.data) == 3


@pytest.mark.skipif(
    not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0,
    reason="Connect an operator as an input is supported starting server version 7.0",
)
def test_connect_generic_data_container_operator(server_type):
    """Round-trip a GenericDataContainer through the 'forward' operator."""
    gdc_in = dpf.core.GenericDataContainer(server=server_type)
    forward_op = dpf.core.Operator("forward", server=server_type)
    forward_op.connect(0, gdc_in)
    gdc_out = forward_op.get_output(0, dpf.core.types.generic_data_container)
    assert gdc_out is not None


@pytest.mark.skipif(
not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_6_2,
reason="Connect an operator as an input is supported starting server version 6.2",
Expand Down Expand Up @@ -632,19 +621,19 @@ def test_connect_model(plate_msup, server_type):
assert np.allclose(fc[0].data[0], [5.12304110e-14, 3.64308310e-04, 5.79805917e-06])


def test_operator_several_output_types(plate_msup, server_type):
inpt = dpf.core.Field(nentities=3, server=server_type)
def test_operator_several_output_types_remote(plate_msup, server_type_remote_process):
inpt = dpf.core.Field(nentities=3, server=server_type_remote_process)
inpt.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
inpt.scoping.ids = [1, 2, 3]
inpt.unit = "m"
op = dpf.core.Operator("unit_convert", server=server_type)
op = dpf.core.Operator("unit_convert", server=server_type_remote_process)
op.inputs.entity_to_convert(inpt)
op.inputs.unit_name("mm")
f = op.outputs.converted_entity_as_field()
assert f.unit == "mm"
assert np.allclose(f.data.flatten("C"), np.array([1, 2, 3, 4, 5, 6, 7, 8, 9]) * 1000)

model = dpf.core.Model(plate_msup, server=server_type)
model = dpf.core.Model(plate_msup, server=server_type_remote_process)
din = model.metadata.meshed_region.nodes.coordinates_field.data

assert model.metadata.meshed_region.nodes.coordinates_field.unit == "m"
Expand All @@ -657,26 +646,6 @@ def test_operator_several_output_types(plate_msup, server_type):
assert np.allclose(m.nodes.coordinates_field.data, np.array(din) * 1000)


def test_operator_several_output_types2(server_type):
    """Check that 'Rescope' yields equivalent data for Field and FieldsContainer inputs."""
    source_field = dpf.core.Field(nentities=3, server=server_type)
    source_field.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    source_field.scoping.ids = [1, 2, 3]
    source_field.unit = "m"

    rescope = dpf.core.Operator("Rescope", server=server_type)
    rescope.inputs.fields(source_field)
    rescope.inputs.mesh_scoping(dpf.core.Scoping(ids=[1, 2]))
    # Rescoping to ids [1, 2] keeps the first two entities only.
    rescoped_field = rescope.outputs.fields_as_field()
    assert np.allclose(rescoped_field.data.flatten("C"), [1, 2, 3, 4, 5, 6])

    container = dpf.core.FieldsContainer(server=server_type)
    container.labels = ["time"]
    container.add_field({"time": 1}, source_field)

    # Same operator, fields-container input path this time.
    rescope.inputs.fields(container)
    rescoped_container = rescope.outputs.fields_as_fields_container()
    assert np.allclose(rescoped_container[0].data.flatten("C"), [1, 2, 3, 4, 5, 6])


def test_create_operator_config(server_type):
conf = dpf.core.Config(server=server_type)
assert conf.config_option_documentation("mutex") == ""
Expand Down Expand Up @@ -865,14 +834,23 @@ def test_connect_get_output_double_list_operator(server_type):


@conftest.raises_for_servers_version_under("4.0")
def test_connect_get_output_data_tree_operator():
d = dpf.core.DataTree({"name": "Paul"})
op = dpf.core.operators.utility.forward(d)
def test_connect_get_output_data_tree_operator(server_type):
d = dpf.core.DataTree({"name": "Paul"}, server=server_type)
op = dpf.core.operators.utility.forward(d, server=server_type)
d_out = op.get_output(0, dpf.core.types.data_tree)
assert d_out.get_as("name") == "Paul"


def test_operator_several_output_types(plate_msup):
@conftest.raises_for_servers_version_under("7.0")
def test_connect_get_output_generic_data_container_operator(server_clayer):
gdc = dpf.core.GenericDataContainer(server=server_clayer)
gdc.set_property("n", 1)
op = dpf.core.operators.utility.forward(gdc, server=server_clayer)
gdc_out = op.get_output(0, dpf.core.types.generic_data_container)
assert gdc_out.get_property("n") == 1


def test_operator_several_output_types_copy(plate_msup):
inpt = dpf.core.Field(nentities=3)
inpt.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
inpt.scoping.ids = [1, 2, 3]
Expand Down
16 changes: 16 additions & 0 deletions tests/test_python_plugins.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from conftest import SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_4_0
from ansys.dpf import core as dpf
import conftest
from ansys.dpf.core.custom_operator import update_virtual_environment_for_custom_operators
from ansys.dpf.core.errors import DPFServerException
from ansys.dpf.core.operator_specification import (
CustomSpecification,
Expand All @@ -26,6 +27,9 @@
if platform.system() == "Linux":
pytest.skip("Known failures for the Ubuntu-latest GitHub pipelines", allow_module_level=True)

update_virtual_environment_for_custom_operators(restore_original=True)
update_virtual_environment_for_custom_operators()


@pytest.fixture(scope="module")
def load_all_types_plugin(testfiles_dir):
Expand Down Expand Up @@ -200,6 +204,18 @@ def test_data_tree(server_type_remote_process, testfiles_dir):
assert dt.get_as("name") == "Paul"


@pytest.mark.skipif(not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0, reason="Requires DPF 7.0")
def test_generic_data_container(server_clayer_remote_process, testfiles_dir):
    """Forward a GenericDataContainer through the custom Python plugin operator."""
    load_all_types_plugin_with_serv(server_clayer_remote_process, testfiles_dir)
    container = dpf.GenericDataContainer(server=server_clayer_remote_process)
    container.set_property(property_name="n", prop=1)
    custom_forward = dpf.Operator(
        "custom_forward_generic_data_container", server=server_clayer_remote_process
    )
    custom_forward.connect(0, container)
    result: dpf.GenericDataContainer = custom_forward.get_output(
        0, dpf.types.generic_data_container
    )
    assert result is not None
    assert result.get_property("n") == 1


@conftest.raises_for_servers_version_under("4.0")
def test_syntax_error(server_type_remote_process, testfiles_dir):
dpf.load_library(
Expand Down
18 changes: 18 additions & 0 deletions tests/testfiles/pythonPlugins/all_types/dpf_types_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
types,
workflow,
data_tree,
generic_data_container,
)


Expand Down Expand Up @@ -192,3 +193,20 @@ def specification(self):
@property
def name(self):
return "custom_forward_data_tree"


class ForwardGenericDataContainerOperator(CustomOperatorBase):
    """Custom operator that forwards a GenericDataContainer from input pin 0 to output pin 0."""

    def run(self):
        """Read the input container through both input APIs and forward it unchanged."""
        # First fetch by Python class to exercise the class-based input path.
        f = self.get_input(0, generic_data_container.GenericDataContainer)
        # Idiomatic identity check (was `not f is None`, flagged by PEP 8 / E714).
        assert f is not None
        # Fetch again via the `types` enum to exercise the enum-based input path.
        f = self.get_input(0, types.generic_data_container)
        self.set_output(0, f)
        self.set_succeeded()

    @property
    def specification(self):
        # This test operator provides no formal specification.
        return None

    @property
    def name(self):
        # Name under which the operator is recorded on the DPF server.
        return "custom_forward_generic_data_container"
Original file line number Diff line number Diff line change
Expand Up @@ -23,3 +23,4 @@ def load_operators(*args):
record_operator(dpf_types_op.ForwardMeshesContainerOperator, *args)
record_operator(dpf_types_op.ForwardWorkflowOperator, *args)
record_operator(dpf_types_op.ForwardDataTreeOperator, *args)
record_operator(dpf_types_op.ForwardGenericDataContainerOperator, *args)