Skip to content

Commit 6f042b4

Browse files
author
Lee Yang
committed
migrate build from travis to screwdriver
1 parent 2134791 commit 6f042b4

23 files changed

+302
-96
lines changed

.travis.yml

Lines changed: 0 additions & 35 deletions
This file was deleted.

docs/source/conf.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -28,9 +28,9 @@
2828
author = 'Yahoo Inc'
2929

3030
# The short X.Y version
31-
version = '2.2.1'
31+
version = '2.2.2'
3232
# The full version, including alpha/beta/rc tags
33-
release = '2.2.1'
33+
release = '2.2.2'
3434

3535

3636
# -- General configuration ---------------------------------------------------

pom.xml

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -5,7 +5,7 @@
55
<modelVersion>4.0.0</modelVersion>
66
<groupId>com.yahoo.ml</groupId>
77
<artifactId>tensorflowonspark</artifactId>
8-
<version>2.2.0-SNAPSHOT</version>
8+
<version>2.2.2-SNAPSHOT</version>
99
<packaging>jar</packaging>
1010
<name>tensorflowonspark</name>
1111
<description>Spark Scala inferencing for TensorFlowOnSpark</description>

requirements.txt

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -2,7 +2,7 @@ h5py>=2.9.0
22
numpy>=1.14.0
33
packaging
44
py4j==0.10.7
5-
pyspark==2.4.5
5+
pyspark==2.4.7
66
scipy
77
setuptools>=41.0.0
88
sphinx

screwdriver.yaml

Lines changed: 53 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,53 @@
1+
# Copyright 2017, Verizon Inc.
2+
# Licensed under the terms of the apache license. See the LICENSE file in the project root for terms
3+
4+
version: 4
5+
shared:
6+
environment:
7+
PACKAGE_DIRECTORY: tensorflowonspark
8+
SPARK_HOME: ${SD_ROOT_DIR}/spark
9+
TOX_ARGS: '--verbose'
10+
TOX_ENVLIST: py37
11+
annotations:
12+
screwdriver.cd/cpu: HIGH
13+
screwdriver.cd/ram: HIGH
14+
15+
jobs:
16+
validate_test:
17+
template: python/validate_unittest
18+
requires: [~commit, ~pr]
19+
steps:
20+
- prevalidate_code: |
21+
source scripts/install_spark.sh
22+
23+
validate_lint:
24+
template: python/validate_lint
25+
requires: [~commit, ~pr]
26+
27+
validate_codestyle:
28+
template: python/validate_codestyle
29+
requires: [~commit, ~pr]
30+
31+
validate_safetydb:
32+
template: python/validate_safety
33+
requires: [~commit, ~pr]
34+
35+
# validate_security:
36+
# template: python/validate_security
37+
# requires: [~commit, ~pr]
38+
39+
publish_test_pypi:
40+
template: python/package_python
41+
environment:
42+
PUBLISH: True
43+
TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/
44+
requires: [validate_test, validate_lint, validate_codestyle, validate_safetydb, generate_version]
45+
steps:
46+
- update_version: |
47+
echo 'using version from setup.cfg'
48+
49+
# publish_pypi:
50+
# template: python/package_python
51+
# environment:
52+
# PUBLISH: True
53+
# requires: [verify_test_package]

scripts/install_spark.sh

Lines changed: 11 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,11 @@
1+
#!/bin/bash -x
2+
3+
# Install JDK8
4+
yum install -y java-1.8.0-openjdk
5+
export JAVA_HOME=/usr/lib/jvm/jre-1.8.0
6+
7+
# Install Spark
8+
export SPARK_VERSION=2.4.7
9+
curl -LO http://www-us.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop2.7.tgz
10+
mkdir $SPARK_HOME
11+
tar -xf spark-${SPARK_VERSION}-bin-hadoop2.7.tgz -C $SPARK_HOME --strip-components=1

scripts/start_spark.sh

Lines changed: 12 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,12 @@
1+
#!/bin/bash -x
2+
#export SPARK_HOME=/opt/spark
3+
#export SPARK_LOCAL_IP=127.0.0.1
4+
#export PATH=$SPARK_HOME/bin:$PATH
5+
#
6+
## Start Spark Standalone Cluster
7+
#export SPARK_CLASSPATH=./lib/tensorflow-hadoop-1.0-SNAPSHOT.jar
8+
#export MASTER=spark://$(hostname):7077
9+
#export SPARK_WORKER_INSTANCES=2; export CORES_PER_WORKER=1
10+
#export TOTAL_CORES=$((${CORES_PER_WORKER}*${SPARK_WORKER_INSTANCES}))
11+
12+
${SPARK_HOME}/sbin/start-master.sh; ${SPARK_HOME}/sbin/start-slave.sh -c ${CORES_PER_WORKER} -m 1G ${MASTER}

scripts/stop_spark.sh

Lines changed: 3 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,3 @@
1+
#!/bin/bash -x
2+
3+
${SPARK_HOME}/sbin/stop-slave.sh; ${SPARK_HOME}/sbin/stop-master.sh

scripts/travis_before_install.sh

Lines changed: 0 additions & 28 deletions
This file was deleted.

sd.allow

Lines changed: 4 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,4 @@
1+
version: 1
2+
push:
3+
- screwdriver:6384
4+
- screwdriver:6384pr

0 commit comments

Comments (0)