
Commit ba911fb

Merge pull request #18 from pyansys/docs/additional-examples
docs/additional examples
2 parents: 2e394f0 + cb7a6f9


45 files changed: +921 / -9180 lines

.github/workflows/ci-build.yml
Lines changed: 1 addition & 2 deletions

@@ -35,8 +35,7 @@ jobs:
       run: |
         pip install ansys.grpc.dpf --extra-index-url https://[email protected]/pyansys/_packaging/pyansys/pypi/simple/
         pip install -r requirements_build.txt
-        python setup.py bdist_wheel
-        pip install dist/*.whl
+        pip install -e .
         cd tests
         python -c "from ansys.dpf import core; print(core.Report(gpu=False))"
       env:

.gitignore
Lines changed: 3 additions & 0 deletions

@@ -40,3 +40,6 @@ docker/v211
 # pytest -coverage
 .coverage
 test-output.xml
+
+# downloaded files
+ansys/dpf/core/examples/_cache/

ansys/dpf/core/__init__.py
Lines changed: 25 additions & 0 deletions

@@ -1,3 +1,4 @@
+import inspect
 import os
 import socket
 

@@ -33,6 +34,30 @@
 # solves "QApplication: invalid style override passed, ignoring it."
 os.environ['QT_STYLE_OVERRIDE'] = ''
 
+# Setup data directory
+USER_DATA_PATH = None
+EXAMPLES_PATH = None
+if os.environ.get('DPF_DOCKER', False):  # pragma: no cover
+    # Running DPF within docker (likely for CI)
+    # path must be relative to DPF directory
+    _module_path = os.path.dirname(inspect.getfile(inspect.currentframe()))
+    EXAMPLES_PATH = os.path.join(_module_path, 'examples', '_cache')
+    if not os.path.isdir(EXAMPLES_PATH):
+        os.makedirs(EXAMPLES_PATH)
+
+else:
+    try:
+        import appdirs
+        USER_DATA_PATH = appdirs.user_data_dir('ansys-dpf-core')
+        if not os.path.exists(USER_DATA_PATH):  # pragma: no cover
+            os.makedirs(USER_DATA_PATH)
+
+        EXAMPLES_PATH = os.path.join(USER_DATA_PATH, 'examples')
+        if not os.path.exists(EXAMPLES_PATH):  # pragma: no cover
+            os.makedirs(EXAMPLES_PATH)
+    except:  # pragma: no cover
+        pass
+
 
 # Configure PyVista's ``rcParams`` for dpf
 if module_exists("pyvista"):
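For orientation, a minimal sketch (not part of the commit) of where the non-docker branch above places the example cache, assuming appdirs is installed:

# Illustrative sketch only; mirrors the appdirs branch added above.
import os
import appdirs

user_data_path = appdirs.user_data_dir('ansys-dpf-core')
examples_path = os.path.join(user_data_path, 'examples')
print(examples_path)  # per-user data directory, e.g. under ~/.local/share on Linux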

ansys/dpf/core/collection.py
Lines changed: 3 additions & 2 deletions

@@ -203,8 +203,9 @@ def __getitem__(self, index):
            Entry at the index corresponding to the request.
        """
        self_len = len(self)
-        if index < 0:  # no negative indices
-            index = self_len - index
+        if index < 0:
+            # convert to a positive index
+            index = self_len + index
 
        if not self_len:
            raise IndexError('This collection contains no items')
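A quick standalone illustration (not from the commit) of why the conversion changed from subtraction to addition:

# With five entries, index -1 should map to the last item (4), not 6.
self_len = 5
index = -1
old_index = self_len - index  # 6 -> out of range with the previous code
new_index = self_len + index  # 4 -> last entry, matching Python semantics
print(old_index, new_index)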

ansys/dpf/core/data_sources.py
Lines changed: 20 additions & 28 deletions

@@ -2,6 +2,7 @@
 
 from ansys import dpf
 from ansys.grpc.dpf import data_sources_pb2, data_sources_pb2_grpc, base_pb2
+from ansys.dpf.core.errors import protect_grpc
 
 
 class DataSources:

@@ -28,7 +29,6 @@ class DataSources:
    Initialize a model from a result path
    >>> import dpf
    >>> dpf.core.DataSources('file.rst')
-
    """
 
    def __init__(self, result_path=None, data_sources=None, channel=None):

@@ -48,6 +48,7 @@ def __init__(self, result_path=None, data_sources=None, channel=None):
        if result_path is not None:
            self.set_result_file_path(result_path)
 
+    @protect_grpc
    def _connect(self):
        """Connect to the grpc service"""
        return data_sources_pb2_grpc.DataSourcesServiceStub(self._channel)

@@ -71,12 +72,6 @@ def set_result_file_path(self, filepath, key=""):
        >>> data_sources = dpf.core.DataSources()
        >>> data_sources.set_result_file_path('/tmp/file.rst')
        """
-        # The filename needs to be a fully qualified file name
-        if not os.path.dirname(filepath):
-            # append local path
-            # TODO: this will not work on a remote server
-            filepath = os.path.join(os.getcwd(), os.path.basename(filepath))
-
        request = data_sources_pb2.UpdateRequest()
        request.result_path = True
        request.key = key

@@ -96,8 +91,8 @@ def add_file_path(self, filepath, key=""):
 
        Examples
        --------
-        >>> import dpf
-        >>> data_sources = dpf.core.DataSources()
+        >>> from ansys.dpf import core as dpf
+        >>> data_sources = dpf.DataSources()
        >>> data_sources.add_file_path('/tmp/ds.dat')
        """
        # The filename needs to be a fully qualified file name

@@ -111,51 +106,48 @@ def add_file_path(self, filepath, key=""):
        request.data_sources.CopyFrom(self._message)
        self._stub.Update(request)
 
-    def add_upstream(self, upstream_data_sources, upstream_id = -2):
+    def add_upstream(self, upstream_data_sources, upstream_id=-2):
        """Add an upstream datasources.
 
-        This is used to add a set of path creating an upstram for recursive workflows.
+        This is used to add a set of path creating an upstram for
+        recursive workflows.
 
        Parameters
        ----------
        datasources : DataSources
 
        """
-
        request = data_sources_pb2.UpdateUpstreamRequest()
        request.upstream_id = upstream_id
        request.upstream_data_sources.CopyFrom(upstream_data_sources._message)
        request.data_sources.CopyFrom(self._message)
-        self._stub.UpdateUpstream(request)
+        self._stub.UpdateUpstream(request)
 
    @property
    def result_key(self):
-        info =self.__info__()
-        return info["result_key"]
-
+        return self._info["result_key"]
+
    @property
    def result_files(self):
-        info =self.__info__()
-        return info["paths"][self.result_key]
-
-    def __info__(self):
+        return self._info["paths"][self.result_key]
+
+    @property
+    def _info(self):
        list = self._stub.List(self._message)
-        paths ={}
+        paths = {}
        for key in list.paths:
            key_paths=[]
            for path in list.paths[key].paths:
                key_paths.append(path)
-            paths[key]=key_paths
-        out = {"result_key":list.result_key, "paths":paths}
+            paths[key] = key_paths
+        out = {"result_key": list.result_key, "paths": paths}
        return out
-
+
    def __str__(self):
-        info =self.__info__()
-        txt = f'DPF data_sources with result key: {self.result_key} \n'
+        info = self._info
+        txt = f'DPF data_sources with result key: {self.result_key}\n'
        txt += f'paths: {info["paths"]}\n'
        return txt
-
-
 
    def __del__(self):
        try:  # should silently fail
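A hedged usage sketch of the properties refactored above; the path is a placeholder and a running DPF server is assumed:

# Hypothetical usage; '/tmp/file.rst' is a placeholder result file.
from ansys.dpf import core as dpf

data_sources = dpf.DataSources('/tmp/file.rst')
print(data_sources.result_key)    # key the result file is registered under
print(data_sources.result_files)  # paths stored for that key via _info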

ansys/dpf/core/dpf_operator.py
Lines changed: 28 additions & 29 deletions

@@ -1,4 +1,5 @@
 """Interface to underlying gRPC Operator"""
+from textwrap import wrap
 import logging
 import grpc
 import functools

@@ -11,7 +12,6 @@
 from ansys.dpf.core.inputs import Inputs
 from ansys.dpf.core.outputs import Outputs
 from ansys.dpf.core.mapping_types import map_types_to_python
-from ansys.dpf.core.raw_operators import DPF_HTML_OPERATOR_DOCS
 from ansys.dpf.core.errors import protect_grpc
 
 LOG = logging.getLogger(__name__)

@@ -63,21 +63,17 @@ def __init__(self, name, channel=None):
        self._description = None
        self.inputs = None
        self.outputs = None
-        try:
-            self.__send_init_request()
 
-            # add dynamic inputs
-            if len(self._message.spec.map_input_pin_spec) > 0:
-                self.inputs = Inputs(self._message.spec.map_input_pin_spec, self)
-            if len(self._message.spec.map_output_pin_spec)!=0:
-                self.outputs = Outputs(self._message.spec.map_output_pin_spec, self)
-            self._description = self._message.spec.description
+        self.__send_init_request()
 
-        except grpc.RpcError as e:
-            if e.code() == grpc.StatusCode.INVALID_ARGUMENT:
-                raise ValueError(f'Invalid operator name "{name}"')
+        # add dynamic inputs
+        if len(self._message.spec.map_input_pin_spec) > 0:
+            self.inputs = Inputs(self._message.spec.map_input_pin_spec, self)
+        if len(self._message.spec.map_output_pin_spec) != 0:
+            self.outputs = Outputs(self._message.spec.map_output_pin_spec, self)
+        self._description = self._message.spec.description
 
-    def _add_sub_res_operators(self, sub_results):
+    def _add_sub_res_operators(self, sub_results):
        """Dynamically add operators instantiating for sub-results.
 
        The new operators subresults are connected to the parent

@@ -218,19 +214,20 @@ def __del__(self):
 
    def __str__(self):
        # return this repr and operator one level up
-        txt = f'DPF "{self.name}" operator\n'
+        txt = f'DPF "{self.name}" Operator\n'
        if self._description:
-            line = [' ','description:', self._description]
-            txt+='{:^3} {:^6} {:^15}'.format(*line)
-            txt+='\n'
+            txt += '  Description:\n'
+            txt += '\n'.join(wrap(self._description, initial_indent='  ',
+                                  subsequent_indent='  '))
+            txt += '\n\n'
        if self.inputs:
-            line = [' ',self.inputs.__str__()]
-            txt+='{:^3} {:^21}'.format(*line)
-            txt+='\n'
+            line = [' ', str(self.inputs)]
+            txt += '{:^3} {:^21}'.format(*line)
+            txt += '\n'
        if self.outputs:
-            line = [' ',self.outputs.__str__()]
-            txt+='{:^3} {:^21}'.format(*line)
-            txt+='\n'
+            line = [' ', str(self.outputs)]
+            txt += '{:^3} {:^21}'.format(*line)
+            txt += '\n'
 
        return txt
 

@@ -257,18 +254,20 @@ def _find_outputs_corresponding_pins(self, type_names, inpt, pin,
        elif python_name == "Any":
            corresponding_pins.append(pin)
 
+    @protect_grpc
    def _sub_result_op(self, name):
        op = Operator(name)
-        if self.inputs!=None:
+        if self.inputs is not None:
            for key in self.inputs._connected_inputs:
                inpt = self.inputs._connected_inputs[key]
                if type(inpt).__name__ == 'dict':
                    for keyout in inpt:
                        op.connect(key,inpt[keyout],keyout)
-                else :
+                else:
                    op.connect(key,inpt)
        return op
 
+    @protect_grpc
    def __send_init_request(self):
        request = operator_pb2.OperatorName()
        request.name = self.name

@@ -277,12 +276,12 @@ def __send_init_request(self):
    def __mul__(self, inpt):
        if isinstance(inpt, Operator):
            op = Operator("dot")
-            op.connect(0,self,0)
-            op.connect(1,inpt,0)
+            op.connect(0, self, 0)
+            op.connect(1, inpt, 0)
        elif isinstance(inpt, float):
            op = Operator("scale")
-            op.connect(0,self,0)
-            op.connect(1,inpt)
+            op.connect(0, self, 0)
+            op.connect(1, inpt)
        return op
 
    def __truediv__(self, inpt):
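A rough sketch of the operator arithmetic touched above; the operator names are assumptions and a running DPF server is required:

# Hedged sketch: '*' builds a "dot" or "scale" operator per __mul__ above.
from ansys.dpf import core as dpf

disp = dpf.Operator("U")      # assumed displacement operator name
norm = dpf.Operator("norm")   # assumed norm operator name
scaled = disp * 2.0           # float operand -> "scale" operator
dotted = disp * norm          # Operator operand -> "dot" operator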
Lines changed: 1 addition & 0 deletions

@@ -1 +1,2 @@
 from .examples import *
+from .downloads import *
Lines changed: 56 additions & 0 deletions

@@ -0,0 +1,56 @@
+"""Download example datasets from https://github.com/pyansys/example-data"""
+import shutil
+import os
+import urllib.request
+
+from ansys.dpf.core import EXAMPLES_PATH
+EXAMPLE_REPO = 'https://github.com/pyansys/example-data/raw/master/result_files/'
+
+
+def delete_downloads():
+    """Delete all downloaded examples to free space or update the files"""
+    shutil.rmtree(EXAMPLES_PATH)
+    os.makedirs(EXAMPLES_PATH)
+
+
+def _get_file_url(directory, filename):
+    return EXAMPLE_REPO + '/'.join([directory, filename])
+
+
+def _retrieve_file(url, filename, directory):
+    """Download a file from a url"""
+    # First check if file has already been downloaded
+    local_path = os.path.join(EXAMPLES_PATH, directory, os.path.basename(filename))
+    local_path_no_zip = local_path.replace('.zip', '')
+    if os.path.isfile(local_path_no_zip) or os.path.isdir(local_path_no_zip):
+        return local_path_no_zip, None
+
+    # grab the correct url retriever
+    urlretrieve = urllib.request.urlretrieve
+
+    dirpath = os.path.dirname(local_path)
+    if not os.path.isdir(dirpath):
+        os.mkdir(dirpath)
+
+    # Perform download
+    _, resp = urlretrieve(url, local_path)
+    return local_path
+
+
+def _download_file(directory, filename):
+    url = _get_file_url(directory, filename)
+    local_path = _retrieve_file(url, filename, directory)
+
+    if os.environ.get('DPF_DOCKER', False):  # pragma: no cover
+        # override path if running on docker as path must be relative
+        # to docker mount
+        local_path = os.path.join('/dpf/ansys/dpf/core/examples/_cache/', directory,
+                                  filename)
+    return local_path
+
+
+###############################################################################
+# front-facing downloads
+
+def download_transient_result():
+    """Download an example transient result and return the download path"""
+    return _download_file('transient', 'transient.rst')
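A hedged usage sketch of the new download helper; it assumes network access for the first call and a running DPF server for the Model step:

# Illustrative only: fetch (or reuse) the cached example and open it.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples

path = examples.download_transient_result()  # cached under EXAMPLES_PATH
model = dpf.Model(path)                       # assumes a DPF server is available
print(model)

# examples.delete_downloads() clears the cache when the files need refreshing.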
