Skip to content

Commit dbfb653

Browse files
authored
Merge pull request #27 from TerrainBento/barnhark/fix_dakota_error
Barnhark/fix dakota error
2 parents 7d1241f + 2c5118e commit dbfb653

File tree

5 files changed

+47
-55
lines changed

5 files changed

+47
-55
lines changed

.travis.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,6 @@ install:
6363
script:
6464
- pip install pytest pytest-cov coveralls
6565
- pip install jupyter pandas plotnine holoviews tqdm rasterio
66-
- pip install terrainbento --pre
66+
- pip install --pre terrainbento
6767
- pytest umami tests/ --doctest-modules --cov=umami --cov-report=xml:$(pwd)/coverage.xml -vvv
6868
after_success: coveralls

appveyor.yml

Lines changed: 15 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -4,46 +4,41 @@ environment:
44
matrix:
55

66
- TARGET_ARCH: x64
7-
CONDA_NPY: 111
8-
CONDA_INSTALL_LOCN: C:\\Miniconda37-x64
9-
CONDA_PY: 3.6
7+
PYTHON: "C:\\Python38-x64"
8+
MINICONDA: "C:\\Miniconda3-x64"
109

1110
- TARGET_ARCH: x64
12-
CONDA_NPY: 111
13-
CONDA_INSTALL_LOCN: C:\\Miniconda36-x64
14-
CONDA_PY: 3.7
11+
PYTHON: "C:\\Python37-x64"
12+
MINICONDA: "C:\\Miniconda3-x64"
1513

1614
- TARGET_ARCH: x64
17-
CONDA_NPY: 111
18-
CONDA_INSTALL_LOCN: C:\\Miniconda37-x64
19-
CONDA_PY: 3.8
15+
PYTHON: "C:\\Python36-x64"
16+
MINICONDA: "C:\\Miniconda3-x64"
2017

2118
platform:
2219
- x64
2320

24-
os: Previous Visual Studio 2015
25-
2621
init:
27-
- "ECHO %CONDA_INSTALL_LOCN% %CONDA_PY% %HOME% %PLATFORM%"
22+
- "ECHO %PYTHON% %MINICONDA% %HOME% %PLATFORM%"
2823
- "ECHO %APPVEYOR_REPO_BRANCH%"
2924

3025
install:
31-
- cmd: call %CONDA_INSTALL_LOCN%\Scripts\activate.bat
26+
- cmd: call %MINICONDA%\Scripts\activate.bat
27+
- "python -VV"
3228
- cmd: conda update --yes --quiet conda
33-
- cmd: set PYTHONUNBUFFERED=1
3429
- cmd: conda config --set always_yes yes
35-
- cmd: pip install pytest
36-
- cmd: pip install jupyter pandas plotnine holoviews tqdm
37-
- cmd: pip install terrainbento --pre
38-
- cmd: conda install rasterio -c conda-forge
39-
- cmd: conda install landlab -c conda-forge
30+
- cmd: conda config --add channels conda-forge
31+
- cmd: conda info
32+
- cmd: conda list
33+
- cmd: conda search landlab
34+
- cmd: conda env create -f environment-dev.yml
35+
- cmd: conda activate umami-dev
4036
- cmd: conda info
4137
- cmd: conda list
4238

4339
build: false
4440

4541
test_script:
46-
- pip install numpy
4742
- pip install -e .
4843
- pytest -vvv
4944

notebooks/OtherIO_options.ipynb

Lines changed: 21 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@
4040
"\n",
4141
"import rasterio\n",
4242
"\n",
43-
"from landlab import imshow_grid, RasterModelGrid, VoronoiDelaunayGrid\n",
43+
"from landlab import imshow_grid, RasterModelGrid, HexModelGrid\n",
4444
"\n",
4545
"from umami import Metric"
4646
]
@@ -252,11 +252,11 @@
252252
"cell_type": "markdown",
253253
"metadata": {},
254254
"source": [
255-
"## Step 3: Use irregular data and a `VoronoiDelaunayGrid`\n",
255+
"## Step 3: Use irregular data and a `HexModelGrid`\n",
256256
"\n",
257257
"As a final example, we will look at specifying umami with an irregular grid. We won't import any standard format of irregular data but will create some by interpolating the regular data using the scipy tool [RegularGridInterpolator](https://docs.scipy.org/doc/scipy-0.16.0/reference/generated/scipy.interpolate.RegularGridInterpolator.html).\n",
258258
"\n",
259-
"We use a tenth as many grid nodes as we had in the prior example. This is just for speed, feel free to adjust the value for `factor` to change this. \n",
259+
"We use a smaller number of nodes than we had in the prior example. This is just for speed; feel free to adjust the value of `factor` to change this. \n",
260260
"\n",
261261
"We start by creating a set of grid node locations in x and y. "
262262
]
@@ -267,15 +267,13 @@
267267
"metadata": {},
268268
"outputs": [],
269269
"source": [
270-
"factor = 11\n",
271-
"nnodes = int(rmg.x_of_node.size / factor)\n",
272-
"np.random.seed(27)\n",
270+
"factor = 5\n",
271+
"dx = rmg.spacing[0] * factor\n",
273272
"\n",
274-
"# select a random subset of x_of_node and y_of_node and permute by a small quantity.\n",
275-
"# permute only in x, which allows the ordering of nodes to be maintained \n",
276-
"index = np.linspace(0, rmg.x_of_node.size-1, nnodes, dtype=int)\n",
277-
"random_x = rmg.x_of_node[index] + 0.4 * rmg.spacing[0] * np.random.randn(index.size)\n",
278-
"random_y = rmg.y_of_node[index] "
273+
"hmg = HexModelGrid((int(rmg.shape[0]/factor*1.2), int(rmg.shape[1]/factor)+1), \n",
274+
" dx, \n",
275+
" node_layout=\"rect\", \n",
276+
" xy_of_lower_left=rmg.xy_of_lower_left)"
279277
]
280278
},
281279
{
@@ -292,7 +290,7 @@
292290
"outputs": [],
293291
"source": [
294292
"plt.plot(rmg.x_of_node, rmg.y_of_node, 'k.', markersize=2, label=\"Raster Points\")\n",
295-
"plt.plot(random_x, random_y, 'm.', label=\"Irregular Points\")\n",
293+
"plt.plot(hmg.x_of_node, hmg.y_of_node, 'm.', label=\"Irregular Points\")\n",
296294
"plt.xlim(-105.40, -105.375)\n",
297295
"plt.ylim(40.00, 40.025)"
298296
]
@@ -314,14 +312,14 @@
314312
" rmg.x_of_node.reshape(rmg.shape)[0, :]),\n",
315313
" z.reshape(rmg.shape), bounds_error=False, fill_value=None)\n",
316314
"\n",
317-
"interp_z = interp_obj((random_y, random_x))"
315+
"interp_z = interp_obj((hmg.y_of_node, hmg.x_of_node))"
318316
]
319317
},
320318
{
321319
"cell_type": "markdown",
322320
"metadata": {},
323321
"source": [
324-
"Next we create a `VoronoiDelaunayGrid` and add `topographic__elevation` to it. \n",
322+
"Next we create a `HexModelGrid` and add `topographic__elevation` to it. \n",
325323
"\n",
326324
"One nice feature of the `imshow_grid` function is that it works for both regular and irregular grids. "
327325
]
@@ -332,10 +330,9 @@
332330
"metadata": {},
333331
"outputs": [],
334332
"source": [
335-
"vdg = VoronoiDelaunayGrid(random_x, random_y)\n",
336-
"z = vdg.add_field(\"topographic__elevation\", interp_z, at=\"node\")\n",
333+
"z = hmg.add_field(\"topographic__elevation\", interp_z, at=\"node\")\n",
337334
"\n",
338-
"imshow_grid(vdg, z, cmap=\"terrain\")"
335+
"imshow_grid(hmg, z, cmap=\"terrain\")"
339336
]
340337
},
341338
{
@@ -367,8 +364,8 @@
367364
" }\n",
368365
"}\n",
369366
"\n",
370-
"vmg_metric = Metric(vdg, metrics=metrics)\n",
371-
"vmg_metric.calculate()"
367+
"hmg_metric = Metric(hmg, metrics=metrics)\n",
368+
"hmg_metric.calculate()"
372369
]
373370
},
374371
{
@@ -377,7 +374,7 @@
377374
"metadata": {},
378375
"outputs": [],
379376
"source": [
380-
"vmg_metric.names"
377+
"hmg_metric.names"
381378
]
382379
},
383380
{
@@ -386,7 +383,7 @@
386383
"metadata": {},
387384
"outputs": [],
388385
"source": [
389-
"vmg_metric.values"
386+
"hmg_metric.values"
390387
]
391388
},
392389
{
@@ -402,9 +399,9 @@
402399
"metadata": {},
403400
"outputs": [],
404401
"source": [
405-
"for n in vmg_metric.names:\n",
406-
" abs_change = np.abs(vmg_metric.value(n) - rmg_metric.value(n))\n",
407-
" pct_change = abs_change /( 2* (vmg_metric.value(n) + rmg_metric.value(n)))\n",
402+
"for n in hmg_metric.names:\n",
403+
" abs_change = np.abs(hmg_metric.value(n) - rmg_metric.value(n))\n",
404+
" pct_change = abs_change /( (hmg_metric.value(n) + rmg_metric.value(n))/2)\n",
408405
" print(n, \"\\n abs_change: \", abs_change, \"\\n pct_change: \", pct_change)"
409406
]
410407
},

umami/metric.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -220,10 +220,10 @@ def write_metrics_to_file(self, path, style, decimals=3):
220220
>>> file_contents = out.getvalue().splitlines()
221221
>>> for line in file_contents:
222222
... print(line.strip())
223-
9.0 # me
224-
5.0 # ep10
225-
5.0 # oid1_mean
226-
8 # sn1
223+
9.0 me
224+
5.0 ep10
225+
5.0 oid1_mean
226+
8 sn1
227227
228228
Next we output in *yaml* style, in which each metric is serialized in
229229
YAML format.
@@ -241,7 +241,7 @@ def write_metrics_to_file(self, path, style, decimals=3):
241241
if style == "dakota":
242242
stream = "\n".join(
243243
[
244-
str(np.round(val, decimals=decimals)) + " # " + str(key)
244+
str(np.round(val, decimals=decimals)) + " " + str(key)
245245
for key, val in self._values.items()
246246
]
247247
)

umami/residual.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -322,10 +322,10 @@ def write_residuals_to_file(self, path, style, decimals=3):
322322
>>> file_contents = out.getvalue().splitlines()
323323
>>> for line in file_contents:
324324
... print(line.strip())
325-
17.533 # me
326-
9.909 # ep10
327-
9.813 # oid1_mean
328-
-41 # sn1
325+
17.533 me
326+
9.909 ep10
327+
9.813 oid1_mean
328+
-41 sn1
329329
330330
Next we output in *yaml* style, in which each metric is serialized in
331331
YAML format.
@@ -343,7 +343,7 @@ def write_residuals_to_file(self, path, style, decimals=3):
343343
if style == "dakota":
344344
stream = "\n".join(
345345
[
346-
str(np.round(val, decimals=decimals)) + " # " + str(key)
346+
str(np.round(val, decimals=decimals)) + " " + str(key)
347347
for key, val in self._values.items()
348348
]
349349
)

0 commit comments

Comments
 (0)