
Commit 1f6eba3

Merge pull request apache-spark-on-k8s#396 from palantir/rk/upstream-again
2 parents c30c321 + 8cba441 commit 1f6eba3

266 files changed: +6339 -1949 lines changed

.circleci/config.yml

Lines changed: 1 addition & 1 deletion
@@ -314,7 +314,7 @@ jobs:
   run-scala-tests:
     <<: *test-defaults
     # project/CirclePlugin.scala does its own test splitting in SBT based on CIRCLE_NODE_INDEX, CIRCLE_NODE_TOTAL
-    parallelism: 9
+    parallelism: 12
     # Spark runs a lot of tests in parallel, we need 16 GB of RAM for this
     resource_class: xlarge
     steps:

R/pkg/R/mllib_tree.R

Lines changed: 12 additions & 1 deletion
@@ -362,7 +362,18 @@ setMethod("write.ml", signature(object = "GBTClassificationModel", path = "chara
 #'                 For regression, must be "variance". For classification, must be one of
 #'                 "entropy" and "gini", default is "gini".
 #' @param featureSubsetStrategy The number of features to consider for splits at each tree node.
-#'        Supported options: "auto", "all", "onethird", "sqrt", "log2", (0.0-1.0], [1-n].
+#'        Supported options: "auto" (choose automatically for task: If
+#'                           numTrees == 1, set to "all." If numTrees > 1
+#'                           (forest), set to "sqrt" for classification and
+#'                           to "onethird" for regression),
+#'                           "all" (use all features),
+#'                           "onethird" (use 1/3 of the features),
+#'                           "sqrt" (use sqrt(number of features)),
+#'                           "log2" (use log2(number of features)),
+#'                           "n": (when n is in the range (0, 1.0], use
+#'                           n * number of features. When n is in the range
+#'                           (1, number of features), use n features).
+#'                           Default is "auto".
 #' @param seed integer seed for random number generation.
 #' @param subsamplingRate Fraction of the training data used for learning each decision tree, in
 #'                        range (0, 1].
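
The expanded roxygen text above mirrors the featureSubsetStrategy option exposed by the JVM-side ML estimators. As a rough illustration only (not part of this commit; the training Dataset `train` with "features" and "label" columns is assumed), a Java caller might pick a strategy like this:

import org.apache.spark.ml.classification.RandomForestClassificationModel;
import org.apache.spark.ml.classification.RandomForestClassifier;

// Sketch: `train` is a hypothetical Dataset<Row> with "features" and "label" columns.
RandomForestClassifier rf = new RandomForestClassifier()
    .setNumTrees(20)
    // "sqrt" considers sqrt(number of features) per split; a value such as "0.3"
    // would use 30% of the features, and "auto" chooses based on numTrees.
    .setFeatureSubsetStrategy("sqrt");
RandomForestClassificationModel model = rf.fit(train);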

assembly/README

Lines changed: 1 addition & 1 deletion
@@ -9,4 +9,4 @@ This module is off by default. To activate it specify the profile in the command
 
 If you need to build an assembly for a different version of Hadoop the
 hadoop-version system property needs to be set as in this example:
-  -Dhadoop.version=2.7.3
+  -Dhadoop.version=2.7.7

common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDB.java

Lines changed: 1 addition & 1 deletion
@@ -234,7 +234,7 @@ public void close() throws IOException {
    * Closes the given iterator if the DB is still open. Trying to close a JNI LevelDB handle
    * with a closed DB can cause JVM crashes, so this ensures that situation does not happen.
    */
-  void closeIterator(LevelDBIterator it) throws IOException {
+  void closeIterator(LevelDBIterator<?> it) throws IOException {
     synchronized (this._db) {
       DB _db = this._db.get();
       if (_db != null) {
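
The method shown only partially here guards iterator cleanup behind the shared DB handle, as the Javadoc explains. A minimal standalone sketch of that guard pattern, with made-up names and java.io types standing in for the LevelDB classes:

import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;

// Illustrative only: a shared AtomicReference holds the native handle, and
// closing an iterator is skipped once the handle has been released.
class HandleOwner {
  private final AtomicReference<Closeable> handle;

  HandleOwner(Closeable db) {
    this.handle = new AtomicReference<>(db);
  }

  void closeIterator(Closeable iterator) throws IOException {
    synchronized (handle) {
      // Only touch the iterator while the underlying handle is still open.
      if (handle.get() != null) {
        iterator.close();
      }
    }
  }
}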

common/network-common/src/main/java/org/apache/spark/network/client/TransportClient.java

Lines changed: 1 addition & 1 deletion
@@ -318,7 +318,7 @@ private class StdChannelListener
     }
 
     @Override
-    public void operationComplete(Future future) throws Exception {
+    public void operationComplete(Future<? super Void> future) throws Exception {
       if (future.isSuccess()) {
         if (logger.isTraceEnabled()) {
           long timeTaken = System.currentTimeMillis() - startTime;
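
The new signature matches Netty's parameterized GenericFutureListener instead of the raw Future type. A self-contained sketch of that listener shape (the class name and log output are made up):

import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;

// Hypothetical listener: Future<? super Void> is what ChannelFuture#addListener
// accepts, so no raw types or unchecked warnings are needed.
class LoggingListener implements GenericFutureListener<Future<? super Void>> {
  @Override
  public void operationComplete(Future<? super Void> future) {
    if (future.isSuccess()) {
      System.out.println("operation completed");
    } else {
      future.cause().printStackTrace();
    }
  }
}

A ChannelFuture returned by, say, writeAndFlush can then take this listener via addListener without unchecked warnings.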

common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java

Lines changed: 2 additions & 2 deletions
@@ -212,8 +212,8 @@ public void handle(ResponseMessage message) throws Exception {
       if (entry != null) {
         StreamCallback callback = entry.getValue();
         if (resp.byteCount > 0) {
-          StreamInterceptor interceptor = new StreamInterceptor(this, resp.streamId, resp.byteCount,
-            callback);
+          StreamInterceptor<ResponseMessage> interceptor = new StreamInterceptor<>(
+            this, resp.streamId, resp.byteCount, callback);
           try {
             TransportFrameDecoder frameDecoder = (TransportFrameDecoder)
               channel.pipeline().get(TransportFrameDecoder.HANDLER_NAME);

common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java

Lines changed: 1 addition & 1 deletion
@@ -267,7 +267,7 @@ private void encryptMore() throws IOException {
         int copied = byteRawChannel.write(buf.nioBuffer());
         buf.skipBytes(copied);
       } else {
-        region.transferTo(byteRawChannel, region.transfered());
+        region.transferTo(byteRawChannel, region.transferred());
       }
       cos.write(byteRawChannel.getData(), 0, byteRawChannel.length());
       cos.flush();

common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java

Lines changed: 1 addition & 1 deletion
@@ -301,7 +301,7 @@ private void nextChunk() throws IOException {
         int copied = byteChannel.write(buf.nioBuffer());
         buf.skipBytes(copied);
       } else {
-        region.transferTo(byteChannel, region.transfered());
+        region.transferTo(byteChannel, region.transferred());
       }
 
       byte[] encrypted = backend.wrap(byteChannel.getData(), 0, byteChannel.length());
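
This hunk and the TransportCipher one above make the same substitution: Netty 4.1 deprecates FileRegion#transfered() in favour of the correctly spelled transferred(). A standalone sketch (the helper name is hypothetical) of how that progress counter typically drives a transfer loop:

import java.io.IOException;
import java.nio.channels.WritableByteChannel;

import io.netty.channel.FileRegion;

final class FileRegionDrain {
  // Sketch: keep writing until the whole region (count() bytes) has been
  // transferred, resuming each call at the current transferred() offset.
  static void drain(FileRegion region, WritableByteChannel out) throws IOException {
    while (region.transferred() < region.count()) {
      region.transferTo(out, region.transferred());
    }
  }
}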

common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java

Lines changed: 2 additions & 2 deletions
@@ -252,8 +252,8 @@ public String getID() {
         }
       };
       if (req.bodyByteCount > 0) {
-        StreamInterceptor interceptor = new StreamInterceptor(this, wrappedCallback.getID(),
-          req.bodyByteCount, wrappedCallback);
+        StreamInterceptor<RequestMessage> interceptor = new StreamInterceptor<>(
+          this, wrappedCallback.getID(), req.bodyByteCount, wrappedCallback);
         frameDecoder.setInterceptor(interceptor);
       } else {
         wrappedCallback.onComplete(wrappedCallback.getID());

common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java

Lines changed: 4 additions & 4 deletions
@@ -148,11 +148,11 @@ public void close() {
       channelFuture.channel().close().awaitUninterruptibly(10, TimeUnit.SECONDS);
       channelFuture = null;
     }
-    if (bootstrap != null && bootstrap.group() != null) {
-      bootstrap.group().shutdownGracefully();
+    if (bootstrap != null && bootstrap.config().group() != null) {
+      bootstrap.config().group().shutdownGracefully();
     }
-    if (bootstrap != null && bootstrap.childGroup() != null) {
-      bootstrap.childGroup().shutdownGracefully();
+    if (bootstrap != null && bootstrap.config().childGroup() != null) {
+      bootstrap.config().childGroup().shutdownGracefully();
     }
     bootstrap = null;
   }
