Skip to content

Commit f9ecebd

Browse files
committed
Merge remote-tracking branch 'oss-spark/master' into to_avro_improve_NPE
2 parents 128865c + fef1b23 commit f9ecebd

File tree

574 files changed

+6760
-3265
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

574 files changed

+6760
-3265
lines changed

LICENSE-binary

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -268,6 +268,8 @@ io.fabric8:kubernetes-model-storageclass
268268
io.fabric8:zjsonpatch
269269
io.github.java-diff-utils:java-diff-utils
270270
io.jsonwebtoken:jjwt-api
271+
io.jsonwebtoken:jjwt-impl
272+
io.jsonwebtoken:jjwt-jackson
271273
io.netty:netty-all
272274
io.netty:netty-buffer
273275
io.netty:netty-codec

assembly/pom.xml

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -345,11 +345,10 @@
345345
</properties>
346346
</profile>
347347

348-
<!-- Pull in jjwt-impl and jjwt-jackson jars -->
349348
<profile>
350-
<id>jjwt</id>
349+
<id>jjwt-provided</id>
351350
<properties>
352-
<jjwt.deps.scope>compile</jjwt.deps.scope>
351+
<jjwt.deps.scope>provided</jjwt.deps.scope>
353352
</properties>
354353
</profile>
355354

common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/DownloadFileWritableChannel.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,12 +19,13 @@
1919

2020
import org.apache.spark.network.buffer.ManagedBuffer;
2121

22+
import java.io.IOException;
2223
import java.nio.channels.WritableByteChannel;
2324

2425
/**
2526
* A channel for writing data which is fetched to disk, which allows access to the written data only
2627
* after the writer has been closed. Used with DownloadFile and DownloadFileManager.
2728
*/
2829
public interface DownloadFileWritableChannel extends WritableByteChannel {
29-
ManagedBuffer closeAndRead();
30+
ManagedBuffer closeAndRead() throws IOException;
3031
}

common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/SimpleDownloadFile.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,8 @@ private class SimpleDownloadWritableChannel implements DownloadFileWritableChann
6969
}
7070

7171
@Override
72-
public ManagedBuffer closeAndRead() {
72+
public ManagedBuffer closeAndRead() throws IOException {
73+
channel.close();
7374
return new FileSegmentManagedBuffer(transportConf, file, 0, file.length());
7475
}
7576

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.network.shuffle;
19+
20+
import org.apache.spark.network.util.MapConfigProvider;
21+
import org.apache.spark.network.util.TransportConf;
22+
import org.junit.jupiter.api.Test;
23+
24+
import java.io.File;
25+
import java.io.IOException;
26+
27+
import org.junit.jupiter.api.Assertions;
28+
29+
public class SimpleDownloadFileSuite {
30+
@Test
31+
public void testChannelIsClosedAfterCloseAndRead() throws IOException {
32+
File tempFile = File.createTempFile("testChannelIsClosed", ".tmp");
33+
tempFile.deleteOnExit();
34+
TransportConf conf = new TransportConf("test", MapConfigProvider.EMPTY);
35+
36+
DownloadFile downloadFile = null;
37+
try {
38+
downloadFile = new SimpleDownloadFile(tempFile, conf);
39+
DownloadFileWritableChannel channel = downloadFile.openForWriting();
40+
channel.closeAndRead();
41+
Assertions.assertFalse(channel.isOpen(), "Channel should be closed after closeAndRead.");
42+
} finally {
43+
if (downloadFile != null) {
44+
downloadFile.delete();
45+
}
46+
}
47+
}
48+
}

common/utils/src/main/resources/error/error-conditions.json

Lines changed: 42 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -82,6 +82,12 @@
8282
],
8383
"sqlState" : "22003"
8484
},
85+
"ARTIFACT_ALREADY_EXISTS" : {
86+
"message" : [
87+
"The artifact <normalizedRemoteRelativePath> already exists. Please choose a different name for the new artifact because it cannot be overwritten."
88+
],
89+
"sqlState" : "42713"
90+
},
8591
"ASSIGNMENT_ARITY_MISMATCH" : {
8692
"message" : [
8793
"The number of columns or variables assigned or aliased: <numTarget> does not match the number of source expressions: <numExpr>."
@@ -1702,6 +1708,12 @@
17021708
],
17031709
"sqlState" : "42822"
17041710
},
1711+
"HINT_UNSUPPORTED_FOR_JDBC_DIALECT" : {
1712+
"message" : [
1713+
"The option `hint` is not supported for <jdbcDialect> in JDBC data source. Supported dialects are `MySQLDialect`, `OracleDialect` and `DatabricksDialect`."
1714+
],
1715+
"sqlState" : "42822"
1716+
},
17051717
"HLL_INVALID_INPUT_SKETCH_BUFFER" : {
17061718
"message" : [
17071719
"Invalid call to <function>; only valid HLL sketch buffers are supported as inputs (such as those produced by the `hll_sketch_agg` function)."
@@ -2744,7 +2756,7 @@
27442756
},
27452757
"INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION" : {
27462758
"message" : [
2747-
"Cannot add an interval to a date because its microseconds part is not 0. If necessary set <ansiConfig> to \"false\" to bypass this error."
2759+
"Cannot add an interval to a date because its microseconds part is not 0. To resolve this, cast the input date to a timestamp, which supports the addition of intervals with non-zero microseconds."
27482760
],
27492761
"sqlState" : "22006"
27502762
},
@@ -3136,6 +3148,24 @@
31363148
],
31373149
"sqlState" : "42836"
31383150
},
3151+
"INVALID_RECURSIVE_REFERENCE" : {
3152+
"message" : [
3153+
"Invalid recursive reference found inside WITH RECURSIVE clause."
3154+
],
3155+
"subClass" : {
3156+
"NUMBER" : {
3157+
"message" : [
3158+
"Multiple self-references to one recursive CTE are not allowed."
3159+
]
3160+
},
3161+
"PLACE" : {
3162+
"message" : [
3163+
"Recursive references cannot be used on the right side of left outer/semi/anti joins, on the left side of right outer joins, in full outer joins, in aggregates, and in subquery expressions."
3164+
]
3165+
}
3166+
},
3167+
"sqlState" : "42836"
3168+
},
31393169
"INVALID_REGEXP_REPLACE" : {
31403170
"message" : [
31413171
"Could not perform regexp_replace for source = \"<source>\", pattern = \"<pattern>\", replacement = \"<replacement>\" and position = <position>."
@@ -3969,6 +3999,12 @@
39693999
],
39704000
"sqlState" : "0A000"
39714001
},
4002+
"NOT_SUPPORTED_CHANGE_SAME_COLUMN" : {
4003+
"message" : [
4004+
"ALTER TABLE ALTER/CHANGE COLUMN is not supported for changing <table>'s column <fieldName> including its nested fields multiple times in the same command."
4005+
],
4006+
"sqlState" : "0A000"
4007+
},
39724008
"NOT_SUPPORTED_COMMAND_FOR_V2_TABLE" : {
39734009
"message" : [
39744010
"<cmd> is not supported for v2 tables."
@@ -5254,6 +5290,11 @@
52545290
"Resilient Distributed Datasets (RDDs)."
52555291
]
52565292
},
5293+
"REGISTER_UDAF" : {
5294+
"message" : [
5295+
"Registering User Defined Aggregate Functions (UDAFs)."
5296+
]
5297+
},
52575298
"SESSION_BASE_RELATION_TO_DATAFRAME" : {
52585299
"message" : [
52595300
"Invoking SparkSession 'baseRelationToDataFrame'. This is server side developer API"

connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroSuite.scala

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ import java.util.UUID
2525

2626
import scala.jdk.CollectionConverters._
2727

28-
import org.apache.avro.{Schema, SchemaBuilder}
28+
import org.apache.avro.{AvroTypeException, Schema, SchemaBuilder, SchemaFormatter}
2929
import org.apache.avro.Schema.{Field, Type}
3030
import org.apache.avro.Schema.Type._
3131
import org.apache.avro.file.{DataFileReader, DataFileWriter}
@@ -86,7 +86,7 @@ abstract class AvroSuite
8686
}
8787

8888
def getAvroSchemaStringFromFiles(filePath: String): String = {
89-
new DataFileReader({
89+
val schema = new DataFileReader({
9090
val file = new File(filePath)
9191
if (file.isFile) {
9292
file
@@ -96,7 +96,8 @@ abstract class AvroSuite
9696
.filter(_.getName.endsWith("avro"))
9797
.head
9898
}
99-
}, new GenericDatumReader[Any]()).getSchema.toString(false)
99+
}, new GenericDatumReader[Any]()).getSchema
100+
SchemaFormatter.format(AvroUtils.JSON_INLINE_FORMAT, schema)
100101
}
101102

102103
private def getRootCause(ex: Throwable): Throwable = {

connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/application/ConnectRepl.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,8 +31,8 @@ import ammonite.util.Util.newLine
3131

3232
import org.apache.spark.SparkBuildInfo.spark_version
3333
import org.apache.spark.annotation.DeveloperApi
34-
import org.apache.spark.sql.SparkSession
35-
import org.apache.spark.sql.SparkSession.withLocalConnectServer
34+
import org.apache.spark.sql.connect.SparkSession
35+
import org.apache.spark.sql.connect.SparkSession.withLocalConnectServer
3636
import org.apache.spark.sql.connect.client.{SparkConnectClient, SparkConnectClientParser}
3737

3838
/**

connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala

Lines changed: 0 additions & 168 deletions
This file was deleted.

connector/connect/client/jvm/src/test/java/org/apache/spark/sql/JavaEncoderSuite.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
import static org.apache.spark.sql.functions.*;
2929
import static org.apache.spark.sql.RowFactory.create;
3030
import org.apache.spark.api.java.function.MapFunction;
31-
import org.apache.spark.sql.test.SparkConnectServerUtils;
31+
import org.apache.spark.sql.connect.test.SparkConnectServerUtils;
3232
import org.apache.spark.sql.types.StructType;
3333

3434
/**

0 commit comments

Comments (0)