Skip to content

Commit 1e47f0f

Browse files
committed
3.3.x < hadoop <= 3.4.1
1 parent c659f6e commit 1e47f0f

File tree

2 files changed: +3 −8 lines changed

dev/hive/Dockerfile

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,15 +17,15 @@ FROM openjdk:8-jre-slim AS build

 RUN apt-get update -qq && apt-get -qq -y install curl

-ENV HADOOP_VERSION=3.3.6
+ENV HADOOP_VERSION=3.4.1
 ENV AWS_SDK_BUNDLE=1.12.753

 RUN curl https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/${HADOOP_VERSION}/hadoop-aws-${HADOOP_VERSION}.jar -Lo /tmp/hadoop-aws-${HADOOP_VERSION}.jar
 RUN curl https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/${AWS_SDK_BUNDLE}/aws-java-sdk-bundle-${AWS_SDK_BUNDLE}.jar -Lo /tmp/aws-java-sdk-bundle-${AWS_SDK_BUNDLE}.jar

 FROM apache/hive:4.0.0

-ENV HADOOP_VERSION=3.3.6
+ENV HADOOP_VERSION=3.4.1
 ENV AWS_SDK_BUNDLE=1.12.753

 COPY --from=build /tmp/hadoop-aws-${HADOOP_VERSION}.jar /opt/hive/lib/hadoop-aws-${HADOOP_VERSION}.jar

tests/conftest.py

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2535,7 +2535,7 @@ def spark() -> "SparkSession":
     spark_version = ".".join(importlib.metadata.version("pyspark").split(".")[:2])
     scala_version = "2.13"
     iceberg_version = "1.10.0"
-    hadoop_version = "3.3.6"
+    hadoop_version = "3.4.1"
     aws_sdk_version = "1.12.753"

     os.environ["PYSPARK_SUBMIT_ARGS"] = (
@@ -2577,11 +2577,6 @@ def spark() -> "SparkSession":
         .config("spark.sql.catalog.spark_catalog.warehouse", "s3://warehouse/hive/")
         .config("spark.hadoop.fs.s3a.endpoint", "http://localhost:9000")
         .config("spark.hadoop.fs.s3a.path.style.access", "true")
-        .config("spark.hadoop.fs.s3a.threads.keepalivetime", "60000")
-        .config("spark.hadoop.fs.s3a.connection.establish.timeout", "30000")
-        .config("spark.hadoop.fs.s3a.connection.timeout", "200000")
-        .config("spark.hadoop.fs.s3a.multipart.purge.age", str(24 * 60 * 60))
-        .config("spark.hadoop.fs.s3a.aws.credentials.provider", "com.amazonaws.auth.EnvironmentVariableCredentialsProvider")
         .config("spark.sql.catalogImplementation", "hive")
         .config("spark.sql.defaultCatalog", "integration")
         .config("spark.sql.execution.arrow.pyspark.enabled", "true")

0 commit comments

Comments (0)