File tree Expand file tree Collapse file tree 2 files changed +4
-8
lines changed
tests/templates/kuttl/spark-connect Expand file tree Collapse file tree 2 files changed +4
-8
lines changed Original file line number Diff line number Diff line change 3636 vectorAggregatorConfigMapName: vector-aggregator-discovery
3737{% endif %}
3838 args:
39+ # These are unfortunately required to make the S3A connector work with MinIO
40+ # I had expected the clients to be able to set these, but that is not the case.
3941 - --conf spark.hadoop.fs.s3a.aws.credentials.provider=org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider
4042 - --conf spark.hadoop.fs.s3a.path.style.access=true
4143 - --conf spark.hadoop.fs.s3a.endpoint=http://minio:9000
4648 containers:
4749 - name: spark
4850 env:
49- - name: DEMO_GREETING
50- value: "Hello from the overlords"
5151 - name: AWS_ACCESS_KEY_ID
5252 valueFrom:
5353 secretKeyRef:
Original file line number Diff line number Diff line change @@ -15,15 +15,11 @@ data:
1515
1616 print(f"Connecting to Spark Connect server at {remote}")
1717
18+ # Adding s3a configuration properties here has no effect unfortunately.
19+ # They need to be set in the SparkConnectServer.
1820 spark = (
1921 SparkSession.builder.appName("SimpleSparkConnectApp")
2022 .remote(remote)
21- .config("spark.hadoop.fs.s3a.aws.credentials.provider", "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider")
22- .config("spark.hadoop.fs.s3a.path.style.access", "true")
23- .config("spark.hadoop.fs.s3a.endpoint", "http://minio:9000")
24- .config("spark.hadoop.fs.s3a.region", "us-east-1")
25- .config("spark.hadoop.fs.s3a.access.key", "spark")
26- .config("spark.hadoop.fs.s3a.secret.key", "sparkspark")
2723 .getOrCreate()
2824 )
2925
You can’t perform that action at this time.
0 commit comments