
Commit 6b73f99

Allow GenericDataContainer as input/output of a CustomOperator and fix dpf-site.zip update (#1376)
* Make available_operator_names available at root level
* Add typehint to get_or_create_server
* Fix get_api_for_type argument naming
* Allow GenericDataContainer as output of custom_operator
* Add test and add GDC as input. Waiting for dpf-site.zip update feature
* Change update of dpf-site.zip to only change ansys-dpf-core (WIP)
* Update of dpf-site.zip works for non-editable install of ansys-dpf-core
* Use update of dpf-site.zip in test_python_plugins.py
* Fix type_to_input method for generic_data_container in _custom_operators_helpers.py
* Working dpf-site.zip update when ansys-dpf-core is installed non-editable
* Working update of dpf-site.zip for both editable and non-editable installs of ansys-dpf-core
* Skip test for DPF<7.0
* Add test_set_get_int_generic_data_container
* Revert "Add test_set_get_int_generic_data_container" (reverts commit 8ca296c)
* Change operator.get_output to return None when the response from the server is None (instead of instantiating an empty DPF object)
* Skip LegacyGrpc in tests on GenericDataContainer as output of an Operator until server fix
* Remove duplicate

---------

Signed-off-by: paul.profizi <[email protected]>
1 parent 2c95832 commit 6b73f99
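
In practice, the new capability lets a Python custom operator take a GenericDataContainer on an input pin and return one on an output pin. Below is a minimal sketch modeled on the ForwardGenericDataContainerOperator test plugin added in this commit; the class name and the operator name "my_forward_gdc" are illustrative only:

# Minimal sketch of a GenericDataContainer-forwarding custom operator
# (modeled on the ForwardGenericDataContainerOperator test plugin in this commit;
# the class name and operator name "my_forward_gdc" are illustrative).
from ansys.dpf.core.custom_operator import CustomOperatorBase, record_operator
from ansys.dpf.core.generic_data_container import GenericDataContainer


class MyForwardGDC(CustomOperatorBase):
    def run(self):
        # GenericDataContainer can now be requested as an input of a custom operator...
        gdc = self.get_input(0, GenericDataContainer)
        # ...and set as one of its outputs.
        self.set_output(0, gdc)
        self.set_succeeded()

    @property
    def specification(self):
        return None

    @property
    def name(self):
        return "my_forward_gdc"


def load_operators(*args):
    # Entry point called by the DPF server when loading the plugin.
    record_operator(MyForwardGDC, *args)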

File tree (10 files changed: +144 −60 lines changed)

src/ansys/dpf/core/__init__.py
src/ansys/dpf/core/_custom_operators_helpers.py
src/ansys/dpf/core/custom_operator.py
src/ansys/dpf/core/dpf_operator.py
src/ansys/dpf/core/server.py
src/ansys/dpf/core/server_types.py
tests/test_operator.py
tests/test_python_plugins.py
tests/testfiles/pythonPlugins/all_types/dpf_types_op.py
tests/testfiles/pythonPlugins/all_types/load_operators_func.py

src/ansys/dpf/core/__init__.py

Lines changed: 1 addition & 0 deletions

@@ -92,6 +92,7 @@
 from ansys.dpf.core.mesh_info import MeshInfo
 from ansys.dpf.core.generic_data_container import GenericDataContainer

+from ansys.dpf.core.dpf_operator import available_operator_names

 # for matplotlib
 # solves "QApplication: invalid style override passed, ignoring it."

src/ansys/dpf/core/_custom_operators_helpers.py

Lines changed: 10 additions & 1 deletion

@@ -17,7 +17,7 @@
     meshes_container,
     result_info,
     string_field,
-    custom_type_field,
+    custom_type_field, generic_data_container,
 )

 external_operator_api = external_operator_capi.ExternalOperatorCAPI
@@ -70,6 +70,10 @@ def __operator_main__(operator_functor, data):
         custom_type_field.CustomTypeField,
         external_operator_api.external_operator_put_out_custom_type_field,
     ),
+    (
+        generic_data_container.GenericDataContainer,
+        external_operator_api.external_operator_put_out_generic_data_container,
+    )
 ]

 _type_to_input_method = [
@@ -140,6 +144,11 @@ def __operator_main__(operator_functor, data):
         external_operator_api.external_operator_get_in_data_tree,
         "data_tree",
     ),
+    (
+        generic_data_container.GenericDataContainer,
+        external_operator_api.external_operator_get_in_generic_data_container,
+        "generic_data_container",
+    )
     # TO DO : (dpf_operator.Operator, external_operator_api.external_operator_get_in_operator,
     # "operator"),
 ]

src/ansys/dpf/core/custom_operator.py

Lines changed: 63 additions & 12 deletions

@@ -10,7 +10,9 @@
 import ctypes
 import os
 import pathlib
+import re
 import shutil
+import tempfile
 import warnings
 import zipfile

@@ -37,7 +39,9 @@
 from ansys.dpf.gate import object_handler, capi, dpf_vector, integral_types


-def update_virtual_environment_for_custom_operators(restore_original: bool = False):
+def update_virtual_environment_for_custom_operators(
+    restore_original: bool = False,
+):
     """Updates the dpf-site.zip file used to start a venv for Python custom operators to run in.

     It updates the site-packages in dpf-site.zip with the site-packages of the current venv.
@@ -54,6 +58,7 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
     """
     # Get the path to the dpf-site.zip in the current DPF server
     server = dpf.server.get_or_create_server(dpf.SERVER)
+    print(server.ansys_path)
     if server.has_client():
         raise NotImplementedError(
             "Updating the dpf-site.zip of a DPF Server is only available when InProcess."
@@ -71,6 +76,10 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
         else:
             warnings.warn("No original dpf-site.zip found. Current is most likely the original.")
     else:
+        # Store original dpf-site.zip for this DPF Server if no original is stored
+        if not os.path.exists(os.path.dirname(original_dpf_site_zip_path)):
+            os.mkdir(os.path.dirname(original_dpf_site_zip_path))
+            shutil.move(src=current_dpf_site_zip_path, dst=original_dpf_site_zip_path)
         # Get the current paths to site_packages
         import site
         paths_to_current_site_packages = site.getsitepackages()
@@ -83,17 +92,59 @@ def update_virtual_environment_for_custom_operators(restore_original: bool = Fal
         if current_site_packages_path is None:
             warnings.warn("Could not find a currently loaded site-packages folder to update from.")
             return
-        # Store original dpf-site.zip for this DPF Server if no original is stored
-        if not os.path.exists(os.path.dirname(original_dpf_site_zip_path)):
-            os.mkdir(os.path.dirname(original_dpf_site_zip_path))
-            shutil.move(src=current_dpf_site_zip_path, dst=original_dpf_site_zip_path)
-        # Zip the current site-packages at the destination
-        with zipfile.ZipFile(current_dpf_site_zip_path, mode="w") as archive:
-            for file_path in current_site_packages_path.rglob("*"):
-                archive.write(
-                    filename=file_path,
-                    arcname=file_path.relative_to(current_site_packages_path)
-                )
+        # If an ansys.dpf.core.path file exists, then the installation is editable
+        path_file = os.path.join(current_site_packages_path, "ansys.dpf.core.pth")
+        if os.path.exists(path_file):
+            # Treat editable installation of ansys-dpf-core
+            with open(path_file, "r") as f:
+                current_site_packages_path = f.readline()
+        with tempfile.TemporaryDirectory() as tmpdir:
+            os.mkdir(os.path.join(tmpdir, "ansys_dpf_core"))
+            ansys_dir = os.path.join(tmpdir, "ansys_dpf_core")
+            os.mkdir(os.path.join(ansys_dir, "ansys"))
+            os.mkdir(os.path.join(ansys_dir, "ansys", "dpf"))
+            os.mkdir(os.path.join(ansys_dir, "ansys", "grpc"))
+            shutil.copytree(
+                src=os.path.join(current_site_packages_path, "ansys", "dpf", "core"),
+                dst=os.path.join(ansys_dir, "ansys", "dpf", "core"),
+                ignore=lambda directory, contents: ["__pycache__", "result_files"],
+            )
+            shutil.copytree(
+                src=os.path.join(current_site_packages_path, "ansys", "dpf", "gate"),
+                dst=os.path.join(ansys_dir, "ansys", "dpf", "gate"),
+                ignore=lambda directory, contents: ["__pycache__"],
+            )
+            shutil.copytree(
+                src=os.path.join(current_site_packages_path, "ansys", "grpc", "dpf"),
+                dst=os.path.join(ansys_dir, "ansys", "grpc", "dpf"),
+                ignore=lambda directory, contents: ["__pycache__"],
+            )
+            # Find the .dist_info folder
+            pattern = re.compile(r'^ansys_dpf_core\S*')
+            for p in pathlib.Path(current_site_packages_path).iterdir():
+                if p.is_dir():
+                    # print(p.stem)
+                    if re.search(pattern, p.stem):
+                        dist_info_path = p
+                        break
+            shutil.copytree(
+                src=dist_info_path,
+                dst=os.path.join(ansys_dir, dist_info_path.name),
+            )
+            # Zip the files as dpf-site.zip
+            base_name = os.path.join(tmpdir, "ansys_dpf_core_zip")
+            base_dir = "."
+            root_dir = os.path.join(tmpdir, "ansys_dpf_core")  # OK
+            shutil.make_archive(base_name=base_name, root_dir=root_dir, base_dir=base_dir, format='zip')
+            # Include files of interest from the original dpf-site.zip and the ansys_dpf_core.zip
+            with zipfile.ZipFile(current_dpf_site_zip_path, "w") as archive:
+                with zipfile.ZipFile(original_dpf_site_zip_path, mode="r") as original:
+                    for item in original.infolist():
+                        if "ansys" not in item.filename:
+                            archive.writestr(item, original.read(item))
+                with zipfile.ZipFile(base_name + '.zip', mode="r") as original:
+                    for item in original.infolist():
+                        archive.writestr(item, original.read(item))


 def record_operator(operator_type, *args) -> None:
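
For reference, the reworked dpf-site.zip update is exercised at module import in tests/test_python_plugins.py further below. A sketch of the intended call sequence (only valid with an InProcess server, since the function raises NotImplementedError when the server has a client):

# Sketch of how the reworked update is meant to be called (mirrors tests/test_python_plugins.py).
from ansys.dpf.core.custom_operator import update_virtual_environment_for_custom_operators

# Restore the original dpf-site.zip shipped with the DPF Server, if a backup was stored.
update_virtual_environment_for_custom_operators(restore_original=True)
# Rebuild dpf-site.zip so the embedded ansys-dpf-core matches the one currently installed,
# for both editable and non-editable installs.
update_virtual_environment_for_custom_operators()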

src/ansys/dpf/core/dpf_operator.py

Lines changed: 13 additions & 4 deletions

@@ -529,17 +529,26 @@ def get_output(self, pin=0, output_type=None):
         for type_tuple in self._type_to_output_method:
             if output_type is type_tuple[0]:
                 if len(type_tuple) >= 3:
+                    internal_obj = type_tuple[1](self, pin)
+                    if internal_obj is None:
+                        self._progress_thread = None
+                        return
                     if isinstance(type_tuple[2], str):
-                        parameters = {type_tuple[2]: type_tuple[1](self, pin)}
+                        parameters = {type_tuple[2]: internal_obj}
                         out = output_type(**parameters, server=self._server)
                     else:
-                        out = type_tuple[2](type_tuple[1](self, pin))
+                        out = type_tuple[2](internal_obj)
                 if out is None:
+                    internal_obj = type_tuple[1](self, pin)
+                    if internal_obj is None:
+                        self._progress_thread = None
+                        return
                     try:
-                        return output_type(type_tuple[1](self, pin), server=self._server)
+                        return output_type(internal_obj, server=self._server)
                     except TypeError:
                         self._progress_thread = None
-                        return output_type(type_tuple[1](self, pin))
+                        return output_type(internal_obj)
+
                 if out is not None:
                     self._progress_thread = None
                     return out
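
With this change, Operator.get_output returns None when the server response for a pin is None, instead of instantiating an empty DPF object, so callers can test for it explicitly. A hedged sketch of the resulting pattern (mirrors the forward-operator tests in this commit; assumes a DPF server of version 7.0 or later for GenericDataContainer support):

# Sketch of the new get_output contract: None is returned when the server has no
# data for the requested pin, rather than an empty DPF object.
from ansys.dpf import core as dpf

gdc = dpf.GenericDataContainer()
gdc.set_property("n", 1)
op = dpf.operators.utility.forward(gdc)
out = op.get_output(0, dpf.types.generic_data_container)
if out is None:
    # Nothing was produced on pin 0; previously an empty object could be returned.
    print("no output on pin 0")
else:
    assert out.get_property("n") == 1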

src/ansys/dpf/core/server.py

Lines changed: 3 additions & 2 deletions

@@ -11,6 +11,7 @@
 import inspect
 import warnings
 import traceback
+from typing import Union

 from ansys import dpf

@@ -22,7 +23,7 @@
     ServerFactory,
     CommunicationProtocols,
 )
-from ansys.dpf.core.server_types import DPF_DEFAULT_PORT, LOCALHOST, RUNNING_DOCKER
+from ansys.dpf.core.server_types import DPF_DEFAULT_PORT, LOCALHOST, RUNNING_DOCKER, BaseServer
 from ansys.dpf.core import server_context


@@ -382,7 +383,7 @@ def connect():
             raise e


-def get_or_create_server(server):
+def get_or_create_server(server: BaseServer) -> Union[BaseServer, None]:
     """Returns the given server or if None, creates a new one.

     Parameters

src/ansys/dpf/core/server_types.py

Lines changed: 1 addition & 1 deletion

@@ -416,7 +416,7 @@ def available_api_types(self):
         pass

     @abc.abstractmethod
-    def get_api_for_type(self, c_api, grpc_api):
+    def get_api_for_type(self, capi, grpcapi):
         pass

     @property

tests/test_operator.py

Lines changed: 18 additions & 40 deletions

@@ -113,6 +113,7 @@ def test_connect_get_out_all_types_operator(server_type):
         dpf.core.TimeFreqSupport(server=server_type),
         dpf.core.Workflow(server=server_type),
         dpf.core.DataTree(server=server_type),
+        # dpf.core.GenericDataContainer(server=server_type),  # Fails for LegacyGrpc
         dpf.core.StringField(server=server_type),
         dpf.core.CustomTypeField(np.float64, server=server_type),
     ]
@@ -261,18 +262,6 @@ def test_connect_operator_output_operator(server_type):
     assert len(fOut.data) == 3


-@pytest.mark.skipif(
-    not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0,
-    reason="Connect an operator as an input is supported starting server version 7.0",
-)
-def test_connect_generic_data_container_operator(server_type):
-    op = dpf.core.Operator("forward", server=server_type)
-    inpt = dpf.core.GenericDataContainer(server=server_type)
-    op.connect(0, inpt)
-    output = op.get_output(0, dpf.core.types.generic_data_container)
-    assert output is not None
-
-
 @pytest.mark.skipif(
     not conftest.SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_6_2,
     reason="Connect an operator as an input is supported starting server version 6.2",
@@ -632,19 +621,19 @@ def test_connect_model(plate_msup, server_type):
     assert np.allclose(fc[0].data[0], [5.12304110e-14, 3.64308310e-04, 5.79805917e-06])


-def test_operator_several_output_types(plate_msup, server_type):
-    inpt = dpf.core.Field(nentities=3, server=server_type)
+def test_operator_several_output_types_remote(plate_msup, server_type_remote_process):
+    inpt = dpf.core.Field(nentities=3, server=server_type_remote_process)
     inpt.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
     inpt.scoping.ids = [1, 2, 3]
     inpt.unit = "m"
-    op = dpf.core.Operator("unit_convert", server=server_type)
+    op = dpf.core.Operator("unit_convert", server=server_type_remote_process)
     op.inputs.entity_to_convert(inpt)
     op.inputs.unit_name("mm")
     f = op.outputs.converted_entity_as_field()
     assert f.unit == "mm"
     assert np.allclose(f.data.flatten("C"), np.array([1, 2, 3, 4, 5, 6, 7, 8, 9]) * 1000)

-    model = dpf.core.Model(plate_msup, server=server_type)
+    model = dpf.core.Model(plate_msup, server=server_type_remote_process)
     din = model.metadata.meshed_region.nodes.coordinates_field.data

     assert model.metadata.meshed_region.nodes.coordinates_field.unit == "m"
@@ -657,26 +646,6 @@ def test_operator_several_output_types(plate_msup, server_type):
     assert np.allclose(m.nodes.coordinates_field.data, np.array(din) * 1000)


-def test_operator_several_output_types2(server_type):
-    inpt = dpf.core.Field(nentities=3, server=server_type)
-    inpt.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
-    inpt.scoping.ids = [1, 2, 3]
-    inpt.unit = "m"
-    uc = dpf.core.Operator("Rescope", server=server_type)
-    uc.inputs.fields(inpt)
-    uc.inputs.mesh_scoping(dpf.core.Scoping(ids=[1, 2]))
-    f = uc.outputs.fields_as_field()
-    assert np.allclose(f.data.flatten("C"), [1, 2, 3, 4, 5, 6])
-
-    fc = dpf.core.FieldsContainer(server=server_type)
-    fc.labels = ["time"]
-    fc.add_field({"time": 1}, inpt)
-
-    uc.inputs.fields(fc)
-    fc2 = uc.outputs.fields_as_fields_container()
-    assert np.allclose(fc2[0].data.flatten("C"), [1, 2, 3, 4, 5, 6])
-
-
 def test_create_operator_config(server_type):
     conf = dpf.core.Config(server=server_type)
     assert conf.config_option_documentation("mutex") == ""
@@ -865,14 +834,23 @@ def test_connect_get_output_double_list_operator(server_type):


 @conftest.raises_for_servers_version_under("4.0")
-def test_connect_get_output_data_tree_operator():
-    d = dpf.core.DataTree({"name": "Paul"})
-    op = dpf.core.operators.utility.forward(d)
+def test_connect_get_output_data_tree_operator(server_type):
+    d = dpf.core.DataTree({"name": "Paul"}, server=server_type)
+    op = dpf.core.operators.utility.forward(d, server=server_type)
     d_out = op.get_output(0, dpf.core.types.data_tree)
     assert d_out.get_as("name") == "Paul"


-def test_operator_several_output_types(plate_msup):
+@conftest.raises_for_servers_version_under("7.0")
+def test_connect_get_output_generic_data_container_operator(server_clayer):
+    gdc = dpf.core.GenericDataContainer(server=server_clayer)
+    gdc.set_property("n", 1)
+    op = dpf.core.operators.utility.forward(gdc, server=server_clayer)
+    gdc_out = op.get_output(0, dpf.core.types.generic_data_container)
+    assert gdc_out.get_property("n") == 1
+
+
+def test_operator_several_output_types_copy(plate_msup):
     inpt = dpf.core.Field(nentities=3)
     inpt.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
     inpt.scoping.ids = [1, 2, 3]

tests/test_python_plugins.py

Lines changed: 16 additions & 0 deletions

@@ -5,6 +5,7 @@
 from conftest import SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_4_0
 from ansys.dpf import core as dpf
 import conftest
+from ansys.dpf.core.custom_operator import update_virtual_environment_for_custom_operators
 from ansys.dpf.core.errors import DPFServerException
 from ansys.dpf.core.operator_specification import (
     CustomSpecification,
@@ -26,6 +27,9 @@
 if platform.system() == "Linux":
     pytest.skip("Known failures for the Ubuntu-latest GitHub pipelines", allow_module_level=True)

+update_virtual_environment_for_custom_operators(restore_original=True)
+update_virtual_environment_for_custom_operators()
+

 @pytest.fixture(scope="module")
 def load_all_types_plugin(testfiles_dir):
@@ -200,6 +204,18 @@ def test_data_tree(server_type_remote_process, testfiles_dir):
     assert dt.get_as("name") == "Paul"


+@pytest.mark.skipif(not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0, reason="Requires DPF 7.0")
+def test_generic_data_container(server_clayer_remote_process, testfiles_dir):
+    load_all_types_plugin_with_serv(server_clayer_remote_process, testfiles_dir)
+    gdc = dpf.GenericDataContainer(server=server_clayer_remote_process)
+    gdc.set_property(property_name="n", prop=1)
+    op = dpf.Operator("custom_forward_generic_data_container", server=server_clayer_remote_process)
+    op.connect(0, gdc)
+    gdc2: dpf.GenericDataContainer = op.get_output(0, dpf.types.generic_data_container)
+    assert gdc2 is not None
+    assert gdc2.get_property("n") == 1
+
+
 @conftest.raises_for_servers_version_under("4.0")
 def test_syntax_error(server_type_remote_process, testfiles_dir):
     dpf.load_library(

tests/testfiles/pythonPlugins/all_types/dpf_types_op.py

Lines changed: 18 additions & 0 deletions

@@ -10,6 +10,7 @@
     types,
     workflow,
     data_tree,
+    generic_data_container,
 )


@@ -192,3 +193,20 @@ def specification(self):
     @property
     def name(self):
         return "custom_forward_data_tree"
+
+
+class ForwardGenericDataContainerOperator(CustomOperatorBase):
+    def run(self):
+        f = self.get_input(0, generic_data_container.GenericDataContainer)
+        assert not f is None
+        f = self.get_input(0, types.generic_data_container)
+        self.set_output(0, f)
+        self.set_succeeded()
+
+    @property
+    def specification(self):
+        return None
+
+    @property
+    def name(self):
+        return "custom_forward_generic_data_container"

tests/testfiles/pythonPlugins/all_types/load_operators_func.py

Lines changed: 1 addition & 0 deletions

@@ -23,3 +23,4 @@ def load_operators(*args):
     record_operator(dpf_types_op.ForwardMeshesContainerOperator, *args)
     record_operator(dpf_types_op.ForwardWorkflowOperator, *args)
     record_operator(dpf_types_op.ForwardDataTreeOperator, *args)
+    record_operator(dpf_types_op.ForwardGenericDataContainerOperator, *args)
