Skip to content

Commit a34aa05

Browse files
committed
Merged PR 337513: Mirror GitHub to TFS
Mirroring GitHub into TFS, bringing (among other things) InProcess by default of GitHub release 0.5.2. Testing pipeline [here](https://tfs.ansys.com:8443/tfs/ANSYS_Development/DPF/_build/results?buildId=7372183&view=logs&s=77b07857-e3de-563a-962e-fe3f8174b54b&j=32ca9294-3ac1-524a-049d-49e4f25e2670). OK Related work items: #692152
1 parent f154415 commit a34aa05

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

50 files changed

+1111
-315
lines changed

LICENSE

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
The MIT License
1+
MIT License
22

33
Copyright (c) 2022 ANSYS, Inc. All rights reserved.
44

ansys/dpf/core/core.py

Lines changed: 18 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -94,8 +94,14 @@ def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None):
9494
--------
9595
>>> from ansys.dpf import core as dpf
9696
>>> from ansys.dpf.core import examples
97-
>>> file_path = dpf.upload_file_in_tmp_folder(examples.static_rst)
98-
97+
>>> server = dpf.start_local_server(config=dpf.AvailableServerConfigs.GrpcServer,
98+
... as_global=False)
99+
>>> file_path = dpf.upload_file_in_tmp_folder(examples.static_rst, server=server)
100+
101+
Notes
102+
-----
103+
Is not implemented for usage with type(server)=
104+
:class:`ansys.dpf.core.server_types.InProcessServer`.
99105
"""
100106
base = BaseService(server, load_operators=False)
101107
return base.upload_file_in_tmp_folder(file_path, new_file_name)
@@ -153,12 +159,17 @@ def download_file(server_file_path, to_client_file_path, server=None):
153159
--------
154160
>>> from ansys.dpf import core as dpf
155161
>>> from ansys.dpf.core import examples
156-
>>> import os
157-
>>> file_path = dpf.upload_file_in_tmp_folder(examples.static_rst)
158-
>>> dpf.download_file(file_path, examples.static_rst)
162+
>>> server = dpf.start_local_server(config=dpf.AvailableServerConfigs.GrpcServer,
163+
... as_global=False)
164+
>>> file_path = dpf.upload_file_in_tmp_folder(examples.static_rst, server=server)
165+
>>> dpf.download_file(file_path, examples.static_rst, server=server)
159166
<BLANKLINE>
160-
Downloading...
167+
...
161168
169+
Notes
170+
-----
171+
Is not implemented for usage with type(server)=
172+
:class:`ansys.dpf.core.server_types.InProcessServer`.
162173
"""
163174
base = BaseService(server, load_operators=False)
164175
return base.download_file(server_file_path, to_client_file_path)
@@ -385,7 +396,7 @@ def __generate_code(TARGET_PATH, filename, name, symbol):
385396

386397
local_dir = os.path.dirname(os.path.abspath(__file__))
387398
LOCAL_PATH = os.path.join(local_dir, "operators")
388-
if self._server().has_client():
399+
if not self._server().local_server:
389400
if self._server().os != 'posix' or (not self._server().os and os.name != 'posix'):
390401
# send local generated code
391402
TARGET_PATH = self.make_tmp_dir_server()

ansys/dpf/core/cyclic_support.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -191,7 +191,8 @@ def sectors_set_for_expansion(self, stage_num=0) -> Scoping:
191191
>>> from ansys.dpf.core import examples
192192
>>> multi_stage = examples.download_multi_stage_cyclic_result()
193193
>>> cyc_support = Model(multi_stage).metadata.result_info.cyclic_support
194-
>>> print(cyc_support.sectors_set_for_expansion(stage_num=1).ids)
194+
>>> sectors_scoping = cyc_support.sectors_set_for_expansion(stage_num=1)
195+
>>> print(sectors_scoping.ids)
195196
[...0... 1... 2... 3... 4... 5... 6... 7... 8... 9... 10... 11]
196197
197198
"""
@@ -223,7 +224,8 @@ def expand_node_id(self, node_id, sectors=None, stage_num=0):
223224
>>> from ansys.dpf.core import examples
224225
>>> multi_stage = examples.download_multi_stage_cyclic_result()
225226
>>> cyc_support = Model(multi_stage).metadata.result_info.cyclic_support
226-
>>> print(cyc_support.expand_node_id(1,stage_num=0).ids)
227+
>>> expanded_scoping = cyc_support.expand_node_id(1,stage_num=0)
228+
>>> print(expanded_scoping.ids)
227229
[...1... 3596... 5816... 8036... 10256... 12476]
228230
229231
"""
@@ -261,7 +263,8 @@ def expand_element_id(self, element_id, sectors=None, stage_num=0):
261263
>>> from ansys.dpf.core import examples
262264
>>> multi_stage = examples.download_multi_stage_cyclic_result()
263265
>>> cyc_support = Model(multi_stage).metadata.result_info.cyclic_support
264-
>>> print(cyc_support.expand_element_id(1,stage_num=0).ids)
266+
>>> expanded_scoping = cyc_support.expand_element_id(1,stage_num=0)
267+
>>> print(expanded_scoping.ids)
265268
[...1... 1558... 2533... 3508... 4483... 5458]
266269
267270
"""

ansys/dpf/core/data_tree.py

Lines changed: 36 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -223,6 +223,23 @@ def to_fill(self):
223223
"""
224224
return _LocalDataTree(self)
225225

226+
def _serialize(self, path, operator):
227+
from ansys.dpf import core
228+
operator.inputs.data_tree.connect(self)
229+
if path:
230+
if self._server.local_server:
231+
operator.inputs.path.connect(path)
232+
operator.run()
233+
return path
234+
else:
235+
directory = core.core.make_tmp_dir_server(self._server)
236+
server_path = core.path_utilities.join(directory, "tmp.txt", server=self._server)
237+
operator.inputs.path.connect(server_path)
238+
operator.run()
239+
return core.download_file(server_path, path, server=self._server)
240+
else:
241+
return operator.get_output(0, core.types.string)
242+
226243
def write_to_txt(self, path=None):
227244
"""
228245
Writes the data tree either as a file or as returned string in a text format.
@@ -246,21 +263,12 @@ def write_to_txt(self, path=None):
246263
>>> import os
247264
>>> data_tree.write_to_txt(os.path.join(tempfile.mkdtemp(), "data_tree.txt"))
248265
<BLANKLINE>
249-
Downloading...
266+
...
250267
251268
"""
252269
from ansys.dpf.core.operators.serialization import data_tree_to_txt
253-
from ansys.dpf import core
254270
op = data_tree_to_txt(server=self._server)
255-
op.inputs.data_tree.connect(self)
256-
if path:
257-
directory = core.core.make_tmp_dir_server(self._server)
258-
server_path = core.path_utilities.join(directory, "tmp.txt", server=self._server)
259-
op.inputs.path.connect(server_path)
260-
op.run()
261-
return core.download_file(server_path, path)
262-
else:
263-
return op.get_output(0, core.types.string)
271+
return self._serialize(path, op)
264272

265273
def write_to_json(self, path=None):
266274
"""
@@ -285,21 +293,26 @@ def write_to_json(self, path=None):
285293
>>> import os
286294
>>> data_tree.write_to_json(os.path.join(tempfile.mkdtemp(), "data_tree.json"))
287295
<BLANKLINE>
288-
Downloading...
296+
...
289297
290298
"""
291299
from ansys.dpf.core.operators.serialization import data_tree_to_json
292-
from ansys.dpf import core
293300
op = data_tree_to_json(server=self._server)
294-
op.inputs.data_tree.connect(self)
301+
return self._serialize(path, op)
302+
303+
@staticmethod
304+
def _deserialize(path, txt, server, operator):
305+
from ansys.dpf import core
295306
if path:
296-
directory = core.core.make_tmp_dir_server(self._server)
297-
server_path = core.path_utilities.join(directory, "tmp.txt", server=self._server)
298-
op.inputs.path.connect(server_path)
299-
op.run()
300-
return core.download_file(server_path, path)
301-
else:
302-
return op.get_output(0, core.types.string)
307+
server = server_module.get_or_create_server(server)
308+
if server.local_server:
309+
operator.inputs.string_or_path.connect(core.DataSources(path, server=server))
310+
else:
311+
server_path = core.upload_file_in_tmp_folder(path, server=server)
312+
operator.inputs.string_or_path.connect(core.DataSources(server_path, server=server))
313+
elif txt:
314+
operator.inputs.string_or_path.connect(str(txt))
315+
return operator.outputs.data_tree()
303316

304317
@staticmethod
305318
def read_from_json(path=None, txt=None, server=None):
@@ -332,14 +345,8 @@ def read_from_json(path=None, txt=None, server=None):
332345
333346
"""
334347
from ansys.dpf.core.operators.serialization import json_to_data_tree
335-
from ansys.dpf import core
336348
op = json_to_data_tree(server=server)
337-
if path:
338-
server_path = core.upload_file_in_tmp_folder(path, server=server)
339-
op.inputs.string_or_path.connect(core.DataSources(server_path, server=server))
340-
elif txt:
341-
op.inputs.string_or_path.connect(str(txt))
342-
return op.outputs.data_tree()
349+
return DataTree._deserialize(path, txt, server, op)
343350

344351
@staticmethod
345352
def read_from_txt(path=None, txt=None, server=None):
@@ -372,14 +379,8 @@ def read_from_txt(path=None, txt=None, server=None):
372379
373380
"""
374381
from ansys.dpf.core.operators.serialization import txt_to_data_tree
375-
from ansys.dpf import core
376382
op = txt_to_data_tree(server=server)
377-
if path:
378-
server_path = core.upload_file_in_tmp_folder(path, server=server)
379-
op.inputs.string_or_path.connect(core.DataSources(server_path, server=server))
380-
elif txt:
381-
op.inputs.string_or_path.connect(str(txt))
382-
return op.outputs.data_tree()
383+
return DataTree._deserialize(path, txt, server, op)
383384

384385
def has(self, entry):
385386
"""

ansys/dpf/core/dpf_operator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -207,7 +207,7 @@ def connect(self, pin, inpt, pin_out=0):
207207
>>> max_fc_op.inputs.connect(disp_op.outputs)
208208
>>> max_field = max_fc_op.outputs.field_max()
209209
>>> max_field.data
210-
array([[0.59428386, 0.00201751, 0.0006032 ]])
210+
DPFArray([[0.59428386, 0.00201751, 0.0006032 ]]...
211211
212212
"""
213213
if inpt is self:

ansys/dpf/core/elements.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -578,8 +578,8 @@ def connectivities_field(self):
578578
>>> elements = model.metadata.meshed_region.elements
579579
>>> field = elements.connectivities_field
580580
>>> field.get_entity_data(1)
581-
array([ 0, 11, 13, 25, 2, 9, 8, 3, 29, 58, 63, 32, 40, 52, 42, 37, 28,
582-
55, 53, 43])
581+
DPFArray([ 0, 11, 13, 25, 2, 9, 8, 3, 29, 58, 63, 32, 40, 52, 42, 37, 28,
582+
55, 53, 43]...
583583
584584
"""
585585
return self._get_connectivities_field()

ansys/dpf/core/examples/downloads.py

Lines changed: 26 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -452,7 +452,7 @@ def download_example_asme_result() -> str:
452452

453453
def download_crankshaft() -> str:
454454
"""Download the result file of an example of a crankshaft
455-
under load simulation and return the download path.
455+
under load and return the download path.
456456
457457
Examples files are downloaded to a persistent cache to avoid
458458
re-downloading the same file twice.
@@ -472,4 +472,28 @@ def download_crankshaft() -> str:
472472
'C:/Users/user/AppData/local/temp/crankshaft.rst'
473473
474474
"""
475-
return _download_file("crankshaft", "crankshaft.rst")
475+
return _download_file("crankshaft", "crankshaft.rst")
476+
477+
def download_piston_rod() -> str:
478+
"""Download the result file of an example of a piston rod
479+
under load and return the download path.
480+
481+
Examples files are downloaded to a persistent cache to avoid
482+
re-downloading the same file twice.
483+
484+
Returns
485+
-------
486+
str
487+
Path to the example file.
488+
489+
Examples
490+
--------
491+
Download an example result file and return the path of the file
492+
493+
>>> from ansys.dpf.core import examples
494+
>>> path = examples.piston_rod
495+
>>> path
496+
'C:/Users/user/AppData/local/temp/piston_rod.rst'
497+
498+
"""
499+
return _download_file("piston_rod", "piston_rod.rst")

ansys/dpf/core/field.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,7 @@ class Field(_FieldBase):
9696
>>> fields_container = disp.outputs.fields_container()
9797
>>> field = fields_container[0]
9898
>>> field.data[2]
99-
array([-0.00672665, -0.03213735, 0.00016716])
99+
DPFArray([-0.00672665, -0.03213735, 0.00016716]...
100100
101101
"""
102102

ansys/dpf/core/field_base.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -287,7 +287,7 @@ def get_entity_data(self, index):
287287
>>> stress_op = model.results.stress()
288288
>>> fields_container = stress_op.outputs.fields_container()
289289
>>> fields_container[0].get_entity_data(0)
290-
array([[-3.27795062e+05, 1.36012200e+06, 1.49090608e+08,
290+
DPFArray([[-3.27795062e+05, 1.36012200e+06, 1.49090608e+08,
291291
-4.88688900e+06, 1.43038560e+07, 1.65455040e+07],
292292
[-4.63817550e+06, 1.29312225e+06, 1.20411832e+08,
293293
-6.06617800e+06, 2.34829700e+07, 1.77231120e+07],
@@ -302,7 +302,7 @@ def get_entity_data(self, index):
302302
[ 9.25567760e+07, 8.15244320e+07, 2.77157632e+08,
303303
-1.48489875e+06, 5.89250600e+07, 2.05608920e+07],
304304
[ 6.70443680e+07, 8.70343440e+07, 2.73050464e+08,
305-
-2.48670150e+06, 1.52268930e+07, 6.09583280e+07]])
305+
-2.48670150e+06, 1.52268930e+07, 6.09583280e+07]]...
306306
307307
"""
308308
pass
@@ -325,7 +325,7 @@ def get_entity_data_by_id(self, id):
325325
>>> stress_op = model.results.stress()
326326
>>> fields_container = stress_op.outputs.fields_container()
327327
>>> fields_container[0].get_entity_data_by_id(391)
328-
array([[-3.27795062e+05, 1.36012200e+06, 1.49090608e+08,
328+
DPFArray([[-3.27795062e+05, 1.36012200e+06, 1.49090608e+08,
329329
-4.88688900e+06, 1.43038560e+07, 1.65455040e+07],
330330
[-4.63817550e+06, 1.29312225e+06, 1.20411832e+08,
331331
-6.06617800e+06, 2.34829700e+07, 1.77231120e+07],
@@ -340,7 +340,7 @@ def get_entity_data_by_id(self, id):
340340
[ 9.25567760e+07, 8.15244320e+07, 2.77157632e+08,
341341
-1.48489875e+06, 5.89250600e+07, 2.05608920e+07],
342342
[ 6.70443680e+07, 8.70343440e+07, 2.73050464e+08,
343-
-2.48670150e+06, 1.52268930e+07, 6.09583280e+07]])
343+
-2.48670150e+06, 1.52268930e+07, 6.09583280e+07]]...
344344
345345
"""
346346
pass
@@ -363,8 +363,8 @@ def append(self, data, scopingid):
363363
>>> field.append([1.,2.,3.],1)
364364
>>> field.append([1.,2.,3.],2)
365365
>>> field.data
366-
array([[1., 2., 3.],
367-
[1., 2., 3.]])
366+
DPFArray([[1., 2., 3.],
367+
[1., 2., 3.]]...
368368
>>> field.scoping.ids
369369
<BLANKLINE>
370370
...[1, 2]...

ansys/dpf/core/field_definition.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -121,7 +121,7 @@ def dimensionality(self):
121121
dimensionality : Dimensionality
122122
Nature and size of the elementary data.
123123
"""
124-
dim = integral_types.MutableListInt32()
124+
dim = integral_types.MutableListInt32(size=3)
125125
nature = integral_types.MutableInt32()
126126
self._api.csfield_definition_fill_dimensionality(self, dim, nature, dim.internal_size)
127127
return Dimensionality(dim.tolist(), natures(int(nature)))

0 commit comments

Comments (0)