Commit 35d9649

add s3 comments
1 parent 50c2d10 commit 35d9649

File tree

2 files changed: +4 −8 lines changed


tests/templates/kuttl/spark-connect/10-deploy-spark-connect.yaml.j2

Lines changed: 2 additions & 2 deletions
@@ -36,6 +36,8 @@ spec:
       vectorAggregatorConfigMapName: vector-aggregator-discovery
   {% endif %}
       args:
+        # These are unfortunately required to make the S3A connector work with MinIO.
+        # I had expected the clients to be able to set these, but that is not the case.
         - --conf spark.hadoop.fs.s3a.aws.credentials.provider=org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider
         - --conf spark.hadoop.fs.s3a.path.style.access=true
         - --conf spark.hadoop.fs.s3a.endpoint=http://minio:9000
@@ -46,8 +48,6 @@ spec:
       containers:
         - name: spark
           env:
-            - name: DEMO_GREETING
-              value: "Hello from the overlords"
            - name: AWS_ACCESS_KEY_ID
              valueFrom:
                secretKeyRef:
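
With Spark Connect, the S3A filesystem is instantiated inside the server JVM, so the Hadoop settings above have to be on the server's SparkConf at startup; the client cannot inject them later. One way to sanity-check a deployed server is to read the values back over the runtime config API. This is a minimal sketch, assuming a reachable Connect URL (the sc:// address below is illustrative, not taken from this test) and that the server exposes these keys via spark.conf.get:

    from pyspark.sql import SparkSession

    # Hypothetical URL; the kuttl test derives its own `remote` value.
    remote = "sc://spark-connect-server:15002"

    spark = SparkSession.builder.remote(remote).getOrCreate()

    # These lookups are answered by the server, so they should echo the
    # --conf flags set in 10-deploy-spark-connect.yaml.j2 above.
    for key in (
        "spark.hadoop.fs.s3a.endpoint",
        "spark.hadoop.fs.s3a.path.style.access",
        "spark.hadoop.fs.s3a.aws.credentials.provider",
    ):
        print(key, "=", spark.conf.get(key, None))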

tests/templates/kuttl/spark-connect/20-run-connect-client.yaml.j2

Lines changed: 2 additions & 6 deletions
@@ -15,15 +15,11 @@ data:

     print(f"Connecting to Spark Connect server at {remote}")

+    # Adding s3a configuration properties here has no effect unfortunately.
+    # They need to be set in the SparkConnectServer.
     spark = (
         SparkSession.builder.appName("SimpleSparkConnectApp")
         .remote(remote)
-        .config("spark.hadoop.fs.s3a.aws.credentials.provider", "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider")
-        .config("spark.hadoop.fs.s3a.path.style.access", "true")
-        .config("spark.hadoop.fs.s3a.endpoint", "http://minio:9000")
-        .config("spark.hadoop.fs.s3a.region", "us-east-1")
-        .config("spark.hadoop.fs.s3a.access.key", "spark")
-        .config("spark.hadoop.fs.s3a.secret.key", "sparkspark")
         .getOrCreate()
     )

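Once the server carries the S3A configuration, the client session needs nothing beyond .remote(...); reads against MinIO are resolved entirely server-side. A minimal sketch of such a read, with a hypothetical bucket and object path:

    from pyspark.sql import SparkSession

    remote = "sc://spark-connect-server:15002"  # illustrative URL

    spark = (
        SparkSession.builder.appName("SimpleSparkConnectApp")
        .remote(remote)
        .getOrCreate()
    )

    # Hypothetical bucket and key. Endpoint, credentials and path-style
    # access all come from the server's --conf flags, not from this client.
    df = spark.read.json("s3a://demo-bucket/data/people.json")
    df.show()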
0 commit comments
