44 python : 2.7
55 dist : xenial
66 before_install :
7- - curl -LO http://www-us.apache.org/dist/spark/spark-2.4.4/spark-2.4.4-bin-hadoop2.7.tgz
8- - export SPARK_HOME=./spark
9- - mkdir $SPARK_HOME
10- - tar -xf spark-2.4.4-bin-hadoop2.7.tgz -C $SPARK_HOME --strip-components=1
11- - export PATH=$SPARK_HOME/bin:$PATH
12- - export SPARK_LOCAL_IP=127.0.0.1
13- - export SPARK_CLASSPATH=./lib/tensorflow-hadoop-1.0-SNAPSHOT.jar
14- - export PYTHONPATH=$(pwd)
15- install :
7+ - source scripts/travis_install_spark.sh
168 - pip install --upgrade pip
9+ install :
1710 - pip install -r requirements.txt
1811 - pip install mock
1912 script :
@@ -22,24 +15,14 @@ matrix:
2215 python : 3.6
2316 dist : xenial
2417 before_install :
25- - curl -LO http://www-us.apache.org/dist/spark/spark-2.4.4/spark-2.4.4-bin-hadoop2.7.tgz
26- - export SPARK_HOME=./spark
27- - mkdir $SPARK_HOME
28- - tar -xf spark-2.4.4-bin-hadoop2.7.tgz -C $SPARK_HOME --strip-components=1
29- - export PATH=$SPARK_HOME/bin:$PATH
30- - export SPARK_LOCAL_IP=127.0.0.1
31- - export SPARK_CLASSPATH=./lib/tensorflow-hadoop-1.0-SNAPSHOT.jar
32- - export PYTHONPATH=$(pwd)
33- install :
18+ - source scripts/travis_install_spark.sh
3419 - pip install --upgrade pip
20+ install :
3521 - pip install -r requirements.txt
3622 - pip list
3723 script :
3824 - sphinx-build -b html docs/source docs/build/html
3925 - test/run_tests.sh
40- # - language: java
41- # dist: xenial
42- # jdk: oraclejdk8
43 26 notifications :
4427 email : false
45 28 deploy :
@@ -62,11 +45,3 @@ deploy:
6245 python : 3.6
6346 tags : true
6447 condition : " $TRAVIS_TAG =~ ^v.*$"
65- - provider : script
66- script : mvn deploy -DskipTests --settings .travis.settings.xml
67- skip_cleanup : true
68- # on:
69- # branch: master
70- # jdk: oraclejdk8
71- # tags: true
72- # condition: "$TRAVIS_TAG =~ ^scala_.*$"
0 commit comments