
Commit adcca66

Merge pull request #195 from s22s/feature/travis-set-python3
Another attempt at getting travis to use correct python version.
2 parents 7f71409 + e8d117c commit adcca66

4 files changed: 37 additions & 44 deletions


.travis.yml

Lines changed: 11 additions & 14 deletions
@@ -1,6 +1,9 @@
 sudo: false
 dist: xenial
-language: scala
+language: python
+
+python:
+  - "3.7"
 
 cache:
   directories:
@@ -11,30 +14,24 @@ cache:
 scala:
   - 2.11.11
 
-jdk:
-  - openjdk8
-
-python:
-  - "3.7"
+env:
+  - COURSIER_VERBOSITY=-1 JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
 
 addons:
   apt:
     packages:
+      - openjdk-8-jdk
       - pandoc
-      - python-pip
 
 install:
-  - pip install setuptools
-
-sbt_args: -no-colors
+  - pip install rasterio shapely pandas numpy
+  - wget -O - https://piccolo.link/sbt-1.2.8.tgz | tar xzf -
 
 script:
-  - sbt test
-  - sbt it:test
+  - sbt/bin/sbt -java-home $JAVA_HOME -batch test
+  - sbt/bin/sbt -java-home $JAVA_HOME -batch it:test
   # - sbt -Dfile.encoding=UTF8 clean coverage test coverageReport
   # Tricks to avoid unnecessary cache updates
   - find $HOME/.sbt -name "*.lock" | xargs rm
   - find $HOME/.ivy2 -name "ivydata-*.properties" | xargs rm
 
-#after_success:
-#  - bash <(curl -s https://codecov.io/bash)

project/RFDependenciesPlugin.scala

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ object RFDependenciesPlugin extends AutoPlugin {
     ),
 
     // NB: Make sure to update the Spark version in pyrasterframes/python/setup.py
-    rfSparkVersion := "2.3.2",
+    rfSparkVersion := "2.3.3",
     rfGeoTrellisVersion := "2.2.0",
     rfGeoMesaVersion := "2.2.1",
     dependencyOverrides += "com.azavea.gdal" % "gdal-warp-bindings" % "33.58d4965"

pyrasterframes/src/main/python/pyrasterframes/rf_types.py

Lines changed: 2 additions & 1 deletion
@@ -298,7 +298,8 @@ def __init__(self, cells, cell_type=None):
 
     def __eq__(self, other):
         if type(other) is type(self):
-            return self.cell_type == other.cell_type and np.ma.allequal(self.cells, other.cells)
+            return self.cell_type == other.cell_type and \
+                np.ma.allequal(self.cells, other.cells, fill_value=True)
        else:
             return False
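
Note on the rf_types.py change above: the fill_value argument of np.ma.allequal decides whether masked (NoData) cells count as equal, and passing fill_value=True makes that choice explicit in Tile.__eq__. A minimal standalone sketch in plain NumPy (array values invented for illustration, not taken from the codebase):

import numpy as np

# Two masked arrays with identical data and identical masks (illustrative values only)
a = np.ma.masked_equal(np.array([[1, 2], [3, 0]]), 0)  # the 0 cell is masked
b = np.ma.masked_equal(np.array([[1, 2], [3, 0]]), 0)

# With fill_value=True, masked positions are treated as equal
print(np.ma.allequal(a, b, fill_value=True))   # True

# With fill_value=False, any masked position makes the comparison fail
print(np.ma.allequal(a, b, fill_value=False))  # False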

pyrasterframes/src/main/python/tests/PyRasterFramesTests.py

Lines changed: 23 additions & 28 deletions
@@ -228,28 +228,23 @@ def test_aggregations(self):
         self.assertEqual(row['rf_agg_stats(tile)'].data_cells, row['rf_agg_data_cells(tile)'])
 
     def test_sql(self):
-        self.rf.createOrReplaceTempView("rf")
-
-        dims = self.rf.withColumn('dims', rf_dimensions('tile')).first().dims
-        dims_str = """{}, {}""".format(dims.cols, dims.rows)
-
-        self.spark.sql("""SELECT tile, rf_make_constant_tile(1, {}, 'uint16') AS One,
-                          rf_make_constant_tile(2, {}, 'uint16') AS Two FROM rf""".format(dims_str, dims_str)) \
-            .createOrReplaceTempView("r3")
-
-        ops = self.spark.sql("""SELECT tile, rf_local_add(tile, One) AS AndOne,
-                          rf_local_subtract(tile, One) AS LessOne,
-                          rf_local_multiply(tile, Two) AS TimesTwo,
-                          rf_local_divide(tile, Two) AS OverTwo
-                          FROM r3""")
-
-        # ops.printSchema
-        statsRow = ops.select(rf_tile_mean('tile').alias('base'),
-                              rf_tile_mean("AndOne").alias('plus_one'),
-                              rf_tile_mean("LessOne").alias('minus_one'),
-                              rf_tile_mean("TimesTwo").alias('double'),
-                              rf_tile_mean("OverTwo").alias('half')) \
-            .first()
+        self.rf.createOrReplaceTempView("rf_test_sql")
+
+        self.spark.sql("""SELECT tile,
+                          rf_local_add(tile, 1) AS and_one,
+                          rf_local_subtract(tile, 1) AS less_one,
+                          rf_local_multiply(tile, 2) AS times_two,
+                          rf_local_divide(tile, 2) AS over_two
+                          FROM rf_test_sql""").createOrReplaceTempView('rf_test_sql_1')
+
+        statsRow = self.spark.sql("""
+            SELECT rf_tile_mean(tile) as base,
+                   rf_tile_mean(and_one) as plus_one,
+                   rf_tile_mean(less_one) as minus_one,
+                   rf_tile_mean(times_two) as double,
+                   rf_tile_mean(over_two) as half
+            FROM rf_test_sql_1
+            """).first()
 
         self.assertTrue(self.rounded_compare(statsRow.base, statsRow.plus_one - 1))
         self.assertTrue(self.rounded_compare(statsRow.base, statsRow.minus_one + 1))
@@ -532,8 +527,6 @@ def less_pi(t):
 
 class TileOps(TestEnvironment):
 
-    from pyrasterframes.rf_types import Tile
-
     def setUp(self):
         # convenience so we can assert around Tile() == Tile()
         self.t1 = Tile(np.array([[1, 2],
@@ -589,9 +582,11 @@ def test_matmul(self):
         # r1 = self.t1 @ self.t2
         r1 = self.t1.__matmul__(self.t2)
 
-        nd = r1.cell_type.no_data_value()
-        e1 = Tile(np.ma.masked_equal(np.array([[nd, 10],
-                                               [nd, nd]], dtype=r1.cell_type.to_numpy_dtype()), nd))
+        # The behavior of np.matmul with masked arrays is not well documented
+        # it seems to treat the 2nd arg as if not a MaskedArray
+        e1 = Tile(np.matmul(self.t1.cells, self.t2.cells), r1.cell_type)
+
+        self.assertTrue(r1 == e1, "{} was not equal to {}".format(r1, e1))
         self.assertEqual(r1, e1)
 
 
@@ -714,7 +709,7 @@ def test_strict_eval(self):
         # again for strict
         df_strict = self.spark.read.raster(self.img_uri, lazy_tiles=False)
         show_str_strict = df_strict.select('proj_raster')._jdf.showString(1, -1, False)
-        self.assertTrue('RasterRef' not in show_str_lazy)
+        self.assertTrue('RasterRef' not in show_str_strict)
 
 
     def test_prt_functions(self):
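
Note on the test_matmul change above: the expected tile is now built by applying np.matmul directly to the operands' cells, since (per the added comment) matmul's treatment of MaskedArray inputs is not well documented. A small standalone probe for inspecting that behavior, using invented values and no dependence on the RasterFrames API:

import numpy as np

# Two small masked arrays standing in for tile cells (values invented for illustration)
a = np.ma.masked_equal(np.array([[1, 2], [3, 0]]), 0)
b = np.ma.masked_equal(np.array([[5, 0], [7, 8]]), 0)

masked_product = np.matmul(a, b)           # matmul applied to the MaskedArrays themselves
plain_product = np.matmul(a.data, b.data)  # same product on the raw underlying data

# Comparing the two shows whether, and how, the masks influenced the result
print(masked_product)
print(plain_product)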
