
Commit 6ada5a4

Merge pull request #484 from s22s/feature/ci-it-rework
Updates to run IT tests in "medium" compute environment.
2 parents: 0c01e8a + 7df6d21

9 files changed (+78, -63 lines changed)

.circleci/Dockerfile

Lines changed: 2 additions & 0 deletions
@@ -10,6 +10,7 @@ USER root
 
 ENV PATH=$CONDA_DIR/bin:$PATH
 
+# circleci is 3434
 COPY --chown=3434:3434 fix-permissions /tmp
 
 RUN \
@@ -40,6 +41,7 @@ RUN \
 conda list python | grep '^python ' | tr -s ' ' | cut -d '.' -f 1,2 | sed 's/$/.*/' >> $CONDA_DIR/conda-meta/pinned && \
 conda install --quiet --yes conda && \
 conda install --quiet --yes pip && \
+pip config set global.progress_bar off && \
 echo "$CONDA_DIR/lib" > /etc/ld.so.conf.d/conda.conf && \
 conda clean --all --force-pkgs-dirs --yes --quiet && \
 sh /tmp/fix-permissions $CONDA_DIR 2> /dev/null

.circleci/config.yml

Lines changed: 38 additions & 17 deletions
@@ -14,7 +14,7 @@ orbs:
 working_directory: ~/repo
 environment:
 SBT_VERSION: 1.3.8
-SBT_OPTS: -Xmx512m
+SBT_OPTS: -Xmx768m
 commands:
 setup:
 description: Setup for sbt build
@@ -23,6 +23,14 @@
 name: Setup sbt
 command: 'true' # NOOP
 
+compile:
+description: Do just the compilation stage to minimize sbt memory footprint
+steps:
+- run:
+name: "Compile Scala via sbt"
+command: |-
+sbt -v -batch compile test:compile it:compile
+
 python:
 commands:
 setup:
@@ -31,9 +39,6 @@
 - run:
 name: Install Python and PIP
 command: |-
-sudo apt-get update -q -y
-sudo apt-get install python3 python3-pip
-sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.7 1
 python -m pip install --user 'setuptools>=45.2'
 
 requirements:
@@ -118,10 +123,7 @@ jobs:
 - python/setup
 - rasterframes/setup
 - rasterframes/restore-cache
-
-- run:
-name: "Compile Scala"
-command: sbt -v -batch compile
+- sbt/compile
 
 - run:
 name: "Scala Tests: core"
@@ -157,42 +159,52 @@ jobs:
 - python/requirements
 - rasterframes/setup
 - rasterframes/restore-cache
+- sbt/compile
 
 - run:
 name: Build documentation
-command: cat /dev/null | sbt makeSite
-no-output-timeout: 30m
+command: sbt makeSite
+no_output_timeout: 30m
 
 - rasterframes/save-doc-artifacts
 - rasterframes/save-cache
 
 it:
 executor: sbt/default
+resource_class: large
 steps:
 - checkout
 - sbt/setup
 - rasterframes/setup
 - rasterframes/restore-cache
+- sbt/compile
 
 - run:
 name: Integration tests
-command: cat /dev/null | sbt it:test
+command: sbt it:test
 no_output_timeout: 30m
 
 - rasterframes/save-artifacts
 - rasterframes/save-cache
 
-itWithoutGdal:
+it-no-gdal:
 executor: sbt/default
+resource_class: large
 steps:
 - checkout
 - sbt/setup
 - rasterframes/setup
 - rasterframes/restore-cache
 
+- run:
+name: Uninstall GDAL
+command: conda remove gdal -q -y --offline
+
+- sbt/compile
+
 - run:
 name: Integration tests
-command: cat /dev/null | sbt it:test
+command: sbt it:test
 no_output_timeout: 30m
 
 - rasterframes/save-artifacts
@@ -204,18 +216,27 @@ workflows:
 jobs:
 - test:
 context: rasterframes
+
 - it:
 context: rasterframes
+# requires:
+# - test
 filters:
 branches:
 only:
-- /feature\/.*-its/
-- itWithoutGdal:
+- /feature\/.*-it.*/
+- /it\/.*/
+
+- it-no-gdal:
 context: rasterframes
+# requires:
+# - test
 filters:
 branches:
 only:
-- /feature\/.*-its/
+- /feature\/.*-it.*/
+- /it\/.*/
+
 - docs:
 context: rasterframes
 filters:
@@ -236,5 +257,5 @@ workflows:
 jobs:
 - test
 - it
-- itWithoutGdal
+- it-no-gdal
 - docs

.sbtopts

Lines changed: 1 addition & 5 deletions
@@ -1,5 +1 @@
--J-XX:+HeapDumpOnOutOfMemoryError
--J-XX:HeapDumpPath=/tmp
--J-XX:+CMSClassUnloadingEnabled
--J-XX:MaxMetaspaceSize=256m
--J-XX:ReservedCodeCacheSize=128m
+

.travis.yml

Lines changed: 0 additions & 36 deletions
This file was deleted.

build.sbt

Lines changed: 6 additions & 4 deletions
@@ -113,11 +113,13 @@ lazy val datasource = project
 spark("mllib").value % Provided,
 spark("sql").value % Provided
 ),
-initialCommands in console := (initialCommands in console).value +
+console / initialCommands := (console / initialCommands).value +
 """
 |import org.locationtech.rasterframes.datasource.geotrellis._
 |import org.locationtech.rasterframes.datasource.geotiff._
-|""".stripMargin
+|""".stripMargin,
+IntegrationTest / fork := true,
+IntegrationTest / javaOptions := Seq("-Xmx3g")
 )
 
 lazy val experimental = project
@@ -133,8 +135,8 @@ lazy val experimental = project
 spark("mllib").value % Provided,
 spark("sql").value % Provided
 ),
-fork in IntegrationTest := true,
-//javaOptions in IntegrationTest := Seq("-Xmx2G")
+IntegrationTest / fork := true,
+IntegrationTest / javaOptions := (datasource / IntegrationTest / javaOptions).value
 )
 
 lazy val docs = project

core/src/it/resources/log4j.properties

Lines changed: 2 additions & 0 deletions
@@ -40,6 +40,8 @@ log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
 log4j.logger.org.locationtech.rasterframes=WARN
 log4j.logger.org.locationtech.rasterframes.ref=WARN
 log4j.logger.org.apache.parquet.hadoop.ParquetRecordReader=OFF
+log4j.logger.geotrellis.spark=INFO
+log4j.logger.geotrellis.raster.gdal=ERROR
 
 # SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
 log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL

docs/src/main/paradox/release-notes.md

Lines changed: 2 additions & 1 deletion
@@ -23,14 +23,15 @@
 - Revisit use of `Tile` equality since [it's more strict](https://github.com/locationtech/geotrellis/pull/2991)
 - Update `reference.conf` to use `geotrellis.raster.gdal` namespace.
 - Replace all uses of `TileDimensions` with `geotrellis.raster.Dimensions[Int]`.
+* Upgraded to `gdal-warp-bindings` 1.0.0.
+* Upgraded to Spark 2.4.5
 * Formally abandoned support for Python 2. Python 2 is dead. Long live Python 2.
 * Introduction of type hints in Python API.
 * Add functions for changing cell values based on either conditions or to achieve a distribution of values. ([#449](https://github.com/locationtech/rasterframes/pull/449))
 * Add `rf_local_min`, `rf_local_max`, and `rf_local_clip` functions.
 * Add cell value scaling functions `rf_rescale` and `rf_standardize`.
 * Add `rf_where` function, similar in spirit to numpy's `where`, or a cell-wise version of Spark SQL's `when` and `otherwise`.
 * Add `rf_sqrt` function to compute cell-wise square root.
-* Upgraded to Spark 2.4.5
 
 ## 0.8.x

pyrasterframes/src/main/python/docs/__init__.py

Lines changed: 19 additions & 0 deletions
@@ -20,6 +20,25 @@
 
 from pweave import PwebPandocFormatter
 
+# Setuptools/easy_install doesn't properly set the execute bit on the Spark scripts,
+# So this preemptively attempts to do it.
+def _chmodit():
+    try:
+        from importlib.util import find_spec
+        import os
+        module_home = find_spec("pyspark").origin
+        print(module_home)
+        bin_dir = os.path.join(os.path.dirname(module_home), 'bin')
+        for filename in os.listdir(bin_dir):
+            try:
+                os.chmod(os.path.join(bin_dir, filename), mode=0o555, follow_symlinks=True)
+            except OSError:
+                pass
+    except ImportError:
+        pass
+
+_chmodit()
+
 
 class PegdownMarkdownFormatter(PwebPandocFormatter):
     def __init__(self, *args, **kwargs):
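
The `_chmodit` helper above runs once at import time and relies only on the standard library. As a rough illustration (not part of the commit), a hypothetical spot check that the pyspark launcher scripts actually ended up executable could look like this:

# Hypothetical check, assuming pyspark is installed in the current environment.
import os
import stat
from importlib.util import find_spec

bin_dir = os.path.join(os.path.dirname(find_spec("pyspark").origin), "bin")
for name in sorted(os.listdir(bin_dir)):
    mode = os.stat(os.path.join(bin_dir, name)).st_mode
    print(name, "executable" if mode & stat.S_IXUSR else "not executable")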

pyrasterframes/src/main/python/pyrasterframes/utils.py

Lines changed: 8 additions & 0 deletions
@@ -68,6 +68,12 @@ def find_pyrasterframes_assembly() -> Union[bytes, str]:
     return jarpath[0]
 
 
+def quiet_logs(sc):
+    logger = sc._jvm.org.apache.log4j
+    logger.LogManager.getLogger("geotrellis.raster.gdal").setLevel(logger.Level.ERROR)
+    logger.LogManager.getLogger("akka").setLevel(logger.Level.ERROR)
+
+
 def create_rf_spark_session(master="local[*]", **kwargs: str) -> SparkSession:
     """ Create a SparkSession with pyrasterframes enabled and configured. """
     jar_path = find_pyrasterframes_assembly()
@@ -86,6 +92,8 @@ def create_rf_spark_session(master="local[*]", **kwargs: str) -> SparkSession:
         .config(conf=conf)  # user can override the defaults
         .getOrCreate())
 
+    quiet_logs(spark)
+
     try:
         spark.withRasterFrames()
         return spark
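
After this change, `create_rf_spark_session` calls the new `quiet_logs` helper right after building the session, silencing the chatty `geotrellis.raster.gdal` and `akka` log4j loggers. A minimal usage sketch (assuming pyrasterframes and its assembly JAR are installed) is:

# Usage sketch: callers need nothing extra after this commit.
from pyrasterframes.utils import create_rf_spark_session, quiet_logs

spark = create_rf_spark_session()  # internally invokes quiet_logs(spark)
quiet_logs(spark)                  # idempotent; safe to call again on an existing session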
