
Commit 4dcecd7

Author: Sumedh Wale
[SNAPPYDATA] upgrade gradle to v5.6.4, the latest 5.x release
- also updated scalatest and shadow plugins and targets accordingly
- fixed scalaStyle errors
- updated jackson dependencies
- added synchronization in MetricsSystem (exposed occasionally by spark-metrics); see the note after its diff below
- fixed test failures (due to the updated hadoop and a few others)
- added license headers for modified files
1 parent bbc94e1 · commit 4dcecd7

File tree: 26 files changed, +326 -60 lines


build.gradle

Lines changed: 31 additions & 14 deletions
@@ -27,9 +27,9 @@ buildscript {
     mavenCentral()
   }
   dependencies {
-    classpath 'io.snappydata:gradle-scalatest:0.23'
+    classpath 'io.snappydata:gradle-scalatest:0.25'
     classpath 'org.github.ngbinh.scalastyle:gradle-scalastyle-plugin_2.11:0.9.0'
-    classpath 'com.github.jengelman.gradle.plugins:shadow:4.0.3'
+    classpath 'com.github.jengelman.gradle.plugins:shadow:5.2.0'
     classpath 'com.commercehub.gradle.plugin:gradle-avro-plugin:0.8.0'
   }
 }
@@ -43,22 +43,22 @@ allprojects {
 
   repositories {
     mavenCentral()
-    maven { url 'http://repository.apache.org/snapshots' }
+    maven { url 'https://repo.hortonworks.com/content/repositories/releases/' }
   }
 
   apply plugin: 'java'
   apply plugin: 'com.github.johnrengelman.shadow'
   apply plugin: 'idea'
 
   group = 'io.snappydata'
-  version = snappySparkVersion
-  productName = productName
+  version = '2.1.1.9'
 
   ext {
+    productName = 'SnappyData'
     vendorName = 'TIBCO Software Inc.'
     scalaBinaryVersion = '2.11'
     scalaVersion = scalaBinaryVersion + '.8'
-    hadoopVersion = '2.7.7'
+    hadoopVersion = '3.2.0'
     protobufVersion = '3.6.1'
     jerseyVersion = '2.22.2'
     sunJerseyVersion = '1.19.4'
@@ -81,7 +81,6 @@ allprojects {
     levelDbJniVersion = '1.8'
     jackson1Version = '1.9.13'
     jacksonVersion = '2.9.9'
-    jacksonBindVersion = '2.9.9'
     snappyJavaVersion = '1.1.7.2'
     lz4Version = '1.5.0'
     lzfVersion = '1.0.4'
@@ -170,6 +169,22 @@ allprojects {
     }
   }
 
+  // set python2 for pyspark if python3 version is an unsupported one
+  String sparkPython = 'python'
+  def checkResult = exec {
+    ignoreExitValue = true
+    commandLine 'sh', '-c', 'python --version 2>/dev/null | grep -Eq "( 3\\.[0-7])|( 2\\.)"'
+  }
+  if (checkResult.exitValue != 0) {
+    checkResult = exec {
+      ignoreExitValue = true
+      commandLine 'sh', '-c', 'python2 --version >/dev/null 2>&1'
+    }
+    if (checkResult.exitValue == 0) {
+      sparkPython = 'python2'
+    }
+  }
+
   def getStackTrace(def t) {
     java.io.StringWriter sw = new java.io.StringWriter()
     java.io.PrintWriter pw = new java.io.PrintWriter(sw)
@@ -193,6 +208,10 @@ subprojects {
   apply plugin: 'maven'
   apply plugin: 'scalaStyle'
 
+  int maxWorkers = project.hasProperty('org.gradle.workers.max') ?
+      project.property('org.gradle.workers.max') as int :
+      Runtime.getRuntime().availableProcessors()
+
   // apply compiler options
   compileJava.options.encoding = 'UTF-8'
   compileJava.options.compilerArgs << '-Xlint:all,-serial,-path,-deprecation'
@@ -279,8 +298,7 @@ subprojects {
 
   if (rootProject.name == 'snappy-spark') {
     task scalaTest(type: Test) {
-      def factory = new com.github.maiflai.BackwardsCompatibleJavaExecActionFactory(gradle.gradleVersion)
-      actions = [ new com.github.maiflai.ScalaTestAction(factory) ]
+      actions = [ new com.github.maiflai.ScalaTestAction() ]
 
       testLogging.exceptionFormat = TestExceptionFormat.FULL
       testLogging.events = TestLogEvent.values() as Set
@@ -326,7 +344,7 @@ subprojects {
   }
   test {
     jvmArgs '-Xss4096k'
-    maxParallelForks = Runtime.getRuntime().availableProcessors()
+    maxParallelForks = maxWorkers
     systemProperties 'spark.master.rest.enabled': 'false',
         'test.src.tables': 'src'
 
@@ -369,6 +387,8 @@ gradle.taskGraph.whenReady { graph ->
       'SPARK_PREPEND_CLASSES': '1',
       'SPARK_SCALA_VERSION': scalaBinaryVersion,
       'SPARK_TESTING': '1',
+      'PYSPARK_PYTHON': sparkPython,
+      'PYSPARK_DRIVER_PYTHON': sparkPython,
       'JAVA_HOME': System.getProperty('java.home')
   systemProperties 'log4j.configuration': "file:${projectDir}/src/test/resources/log4j.properties",
       'derby.system.durability': 'test',
@@ -425,10 +445,7 @@ if (rootProject.name == 'snappy-spark') {
   task scalaStyle {
     dependsOn subprojects.scalaStyle
   }
-  task check {
-    dependsOn subprojects.check
-  }
 } else {
   scalaStyle.dependsOn subprojects.scalaStyle
-  check.dependsOn subprojects.check
 }
+check.dependsOn subprojects.check
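Note: two of the additions above are behavioural. The new maxWorkers value lets a build cap test parallelism through the standard org.gradle.workers.max property (set in gradle.properties or passed as -Porg.gradle.workers.max=N), presumably so the forked test JVMs stay within Gradle's own worker limit, instead of always forking one JVM per processor. The sparkPython probe keeps PySpark tests working where the default python is too new: the grep pattern accepts 2.x and 3.0-3.7, anything else falls back to python2 if present, and the chosen value is exported later via PYSPARK_PYTHON and PYSPARK_DRIVER_PYTHON.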

common/network-common/build.gradle

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ dependencies {
   compile group: 'com.google.code.findbugs', name: 'jsr305', version: jsr305Version
   compile group: 'com.google.guava', name: 'guava', version: guavaVersion
   compile group: 'org.fusesource.leveldbjni', name: 'leveldbjni-all', version: levelDbJniVersion
-  compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonBindVersion
+  compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonVersion
   compile group: 'com.fasterxml.jackson.core', name: 'jackson-annotations', version: jacksonVersion
   compile group: 'org.apache.commons', name: 'commons-lang3', version: commonsLang3Version
 
common/network-shuffle/build.gradle

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ dependencies {
   compile project(subprojectBase + 'snappy-spark-tags_' + scalaBinaryVersion)
 
   compile group: 'org.fusesource.leveldbjni', name: 'leveldbjni-all', version: levelDbJniVersion
-  compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonBindVersion
+  compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonVersion
   compile group: 'com.fasterxml.jackson.core', name: 'jackson-annotations', version: jacksonVersion
   compile group: 'com.google.guava', name: 'guava', version: guavaVersion
   compile(group: 'io.dropwizard.metrics', name: 'metrics-core', version: metricsVersion) {

common/network-yarn/build.gradle

Lines changed: 1 addition & 1 deletion
@@ -47,7 +47,7 @@ dependencies {
   runtimeJar project(subprojectBase + 'snappy-spark-network-common_' + scalaBinaryVersion)
   runtimeJar project(subprojectBase + 'snappy-spark-network-shuffle_' + scalaBinaryVersion)
   runtimeJar group: 'io.netty', name: 'netty-all', version: nettyAllVersion
-  runtimeJar group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonBindVersion
+  runtimeJar group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonVersion
   runtimeJar group: 'com.fasterxml.jackson.core', name: 'jackson-annotations', version: jacksonVersion
   */
 }

core/build.gradle

Lines changed: 1 addition & 1 deletion
@@ -142,7 +142,7 @@ dependencies {
     exclude(group: 'org.slf4j', module: 'slf4j-api')
     exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
   }
-  compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonBindVersion
+  compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonVersion
   compile(group: 'com.fasterxml.jackson.module', name: 'jackson-module-scala_' + scalaBinaryVersion, version: jacksonVersion) {
     exclude(group: 'org.scala-lang', module: 'scala-library')
     exclude(group: 'org.scala-lang', module: 'scala-reflect')

core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala

Lines changed: 22 additions & 3 deletions
@@ -14,6 +14,24 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/*
+ * Changes for TIBCO Project SnappyData data platform.
+ *
+ * Portions Copyright (c) 2017-2021 TIBCO Software Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you
+ * may not use this file except in compliance with the License. You
+ * may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied. See the License for the specific language governing
+ * permissions and limitations under the License. See accompanying
+ * LICENSE file.
+ */
 
 package org.apache.spark.metrics
 

@@ -159,10 +177,11 @@ private[spark] class MetricsSystem private (
     } else { defaultName }
   }
 
-  def getSourcesByName(sourceName: String): Seq[Source] =
+  def getSourcesByName(sourceName: String): Seq[Source] = synchronized {
     sources.filter(_.sourceName == sourceName)
+  }
 
-  def registerSource(source: Source) {
+  def registerSource(source: Source): Unit = synchronized {
     sources += source
     try {
       val regName = buildRegistryName(source)
@@ -172,7 +191,7 @@ private[spark] class MetricsSystem private (
     }
   }
 
-  def removeSource(source: Source) {
+  def removeSource(source: Source): Unit = synchronized {
     sources -= source
     val regName = buildRegistryName(source)
     registry.removeMatching(new MetricFilter {
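
Note: the synchronized blocks matter because sources is a plain mutable.ArrayBuffer shared between threads that register or remove sources and readers (such as the spark-metrics agent) that poll them; unsynchronized, a reader can race a buffer resize and observe a torn state or throw. A minimal self-contained Scala sketch of the pattern being applied (illustrative only, not Spark code):

    import scala.collection.mutable

    object SourceRegistrySketch {
      private val sources = new mutable.ArrayBuffer[String]()

      // mirror of the MetricsSystem change: every access to the shared
      // buffer goes through the object's monitor
      def registerSource(name: String): Unit = synchronized { sources += name }

      def getSourcesByName(name: String): Seq[String] = synchronized {
        sources.filter(_ == name)
      }

      def main(args: Array[String]): Unit = {
        val writers = (1 to 4).map { i =>
          new Thread(new Runnable {
            override def run(): Unit =
              (1 to 10000).foreach(n => registerSource(s"source-$i-$n"))
          })
        }
        // without the synchronized blocks this lookup loop can fail
        // intermittently while the writers resize the buffer
        val reader = new Thread(new Runnable {
          override def run(): Unit =
            (1 to 10000).foreach(_ => getSourcesByName("source-1-1"))
        })
        (writers :+ reader).foreach(_.start())
        (writers :+ reader).foreach(_.join())
        println(s"registered ${synchronized(sources.size)} sources")
      }
    }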

core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala

Lines changed: 19 additions & 0 deletions
@@ -14,6 +14,24 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/*
+ * Changes for TIBCO Project SnappyData data platform.
+ *
+ * Portions Copyright (c) 2017-2021 TIBCO Software Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you
+ * may not use this file except in compliance with the License. You
+ * may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied. See the License for the specific language governing
+ * permissions and limitations under the License. See accompanying
+ * LICENSE file.
+ */
 
 package org.apache.spark.scheduler
 

@@ -22,6 +40,7 @@ import java.util.concurrent.{ExecutorService, RejectedExecutionException}
 
 import scala.language.existentials
 import scala.util.control.NonFatal
+
 import org.apache.spark._
 import org.apache.spark.TaskState.TaskState
 import org.apache.spark.internal.Logging

core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala

Lines changed: 5 additions & 3 deletions
@@ -17,7 +17,7 @@
 /*
  * Changes for TIBCO Project SnappyData data platform.
  *
- * Portions Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved.
+ * Portions Copyright (c) 2017-2021 TIBCO Software Inc. All rights reserved.
  *
  * Licensed under the Apache License, Version 2.0 (the "License"); you
  * may not use this file except in compliance with the License. You
@@ -43,6 +43,7 @@ import java.util.concurrent.atomic.AtomicLong
 import scala.collection.Set
 import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
 import scala.util.Random
+
 import org.apache.spark._
 import org.apache.spark.TaskState.TaskState
 import org.apache.spark.internal.Logging
@@ -195,8 +196,9 @@ private[spark] class TaskSchedulerImpl(
     val tasks = taskSet.tasks
     logInfo("Adding task set " + taskSet.id + " with " + tasks.length + " tasks")
     this.synchronized {
-      val maxRetryAttemptsForWrite = taskSet.properties.
-          getProperty(SNAPPY_WRITE_RETRY_PROP)
+      val maxRetryAttemptsForWrite =
+        if (taskSet.properties ne null) taskSet.properties.getProperty(SNAPPY_WRITE_RETRY_PROP)
+        else null
 
       logInfo("The maxRetryAttemptsForWrite is set to " + maxRetryAttemptsForWrite +
           "maxTaskFailure " + maxTaskFailures)

core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala

Lines changed: 12 additions & 7 deletions
@@ -17,7 +17,7 @@
 /*
  * Changes for TIBCO Project SnappyData data platform.
  *
- * Portions Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved.
+ * Portions Copyright (c) 2017-2021 TIBCO Software Inc. All rights reserved.
  *
  * Licensed under the Apache License, Version 2.0 (the "License"); you
  * may not use this file except in compliance with the License. You
@@ -43,11 +43,12 @@ import java.util.concurrent.ConcurrentLinkedQueue
 import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
 import scala.math.{max, min}
 import scala.util.control.NonFatal
+
 import org.apache.spark._
 import org.apache.spark.internal.Logging
 import org.apache.spark.scheduler.SchedulingMode._
-import org.apache.spark.TaskState.TaskState
 import org.apache.spark.serializer.SerializerInstance
+import org.apache.spark.TaskState.TaskState
 import org.apache.spark.util.{AccumulatorV2, Clock, SystemClock, Utils}
 
 /**
/**
@@ -97,10 +98,13 @@ private[spark] class TaskSetManager(
     sched.backend.getClass.getName.contains("SnappyCoarseGrainedSchedulerBackend")
 
   // keep the configured value for spark.task.cpus preferring local job setting if present
-  val confCpusPerTask: Int = taskSet.properties.getProperty(CPUS_PER_TASK) match {
-    case s if (s ne null) && supportsDynamicCpusPerTask => max(s.toInt, sched.CPUS_PER_TASK)
-    case _ => sched.CPUS_PER_TASK
-  }
+  private[spark] val confCpusPerTask: Int = if (taskSet.properties ne null) {
+    taskSet.properties.getProperty(CPUS_PER_TASK) match {
+      case s if (s ne null) && supportsDynamicCpusPerTask => max(s.toInt, sched.CPUS_PER_TASK)
+      case _ => sched.CPUS_PER_TASK
+    }
+  } else sched.CPUS_PER_TASK
+
   // tracks the max of spark.task.cpus across all tasks in this task set
   // when they are dynamically incremented for OOME/LME failures
   private[spark] var maxCpusPerTask: Int = confCpusPerTask
@@ -513,7 +517,8 @@ private[spark] class TaskSetManager(
     // Serialize and return the task
     val serializedTask: ByteBuffer = try {
       // Task.serializeWithDependencies(task, sched.sc.addedFiles, sched.sc.addedJars, ser)
-      Task.serializeWithDependencies(task, sched.sc.addedFiles, sched.sc.addedJars, getSerializer(task))
+      Task.serializeWithDependencies(task, sched.sc.addedFiles, sched.sc.addedJars,
+        getSerializer(task))
     } catch {
       // If the task cannot be serialized, then there's no point to re-attempt the task,
       // as it will always fail. So just abort the whole task-set.
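
Note: confCpusPerTask gets the same null guard as TaskSchedulerImpl above, and it also encodes a precedence rule: a per-stage spark.task.cpus local property may raise, but never lower, the scheduler-wide value, and only when the backend supports dynamic CPUs per task. A standalone Scala sketch of that rule (names are illustrative, not Spark's):

    // a local property can only raise the scheduler default, and only
    // when dynamic adjustment is supported; absent properties fall back
    def cpusPerTask(props: java.util.Properties, schedCpus: Int,
        dynamicSupported: Boolean): Int =
      Option(props).map(_.getProperty("spark.task.cpus")) match {
        case Some(s) if (s ne null) && dynamicSupported =>
          math.max(s.toInt, schedCpus)
        case _ => schedCpus
      }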

core/src/main/scala/org/apache/spark/ui/UIUtils.scala

Lines changed: 22 additions & 3 deletions
@@ -14,6 +14,24 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/*
+ * Changes for TIBCO Project SnappyData data platform.
+ *
+ * Portions Copyright (c) 2017-2021 TIBCO Software Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you
+ * may not use this file except in compliance with the License. You
+ * may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied. See the License for the specific language governing
+ * permissions and limitations under the License. See accompanying
+ * LICENSE file.
+ */
 
 package org.apache.spark.ui
 

@@ -277,7 +295,8 @@ private[spark] object UIUtils extends Logging {
         </a>
       </div>
       <div class="brand" style="line-height: 2.5;">
-        <a class="brand" href="https://github.com/TIBCOSoftware/snappydata" target="_blank">
+        <a class="brand"
+           href="https://github.com/TIBCOSoftware/snappydata" target="_blank">
           <img src={prependBaseUri("/static/snappydata/snappydata-175X28.png")}
             style="cursor: pointer;" />
         </a>
@@ -633,8 +652,8 @@ private[spark] object UIUtils extends Logging {
       </p>
       <p>
         For assistance, get started at: <br />
-        <a href="https://www.snappydata.io/community" target="_blank">
-          https://www.snappydata.io/community</a> <br />
+        <a href="https://community.tibco.com/products/tibco-computedb" target="_blank">
+          https://community.tibco.com/products/tibco-computedb</a> <br />
         <a href="https://www.tibco.com/" target="_blank">https://www.tibco.com/</a> <br />
         <a href="http://tibcosoftware.github.io/snappydata/" target="_blank">
           Product Documentation
