
Commit 98cf13b

solved iceberg ez
1 parent 51f42fd commit 98cf13b


6 files changed: +126 -111 lines changed

Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+{{
+  config(
+    materialized='table',
+    file_format='iceberg'
+  )
+}}
+
+-- Simple SELECT to verify that dbt can read from your manual table
+-- and write to a new Iceberg table
+SELECT * FROM {{ source('test', 'default.employees') }}
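
The model above reads from a table that this commit does not create ("your manual table"). A minimal Spark SQL sketch of what that setup might look like, assuming the `nessie` catalog configured in profiles.yml below and an illustrative two-column schema (both are assumptions, not part of this commit):

-- Hypothetical setup, not part of this commit: create and seed the Iceberg
-- table the model reads from, using the `nessie` catalog from profiles.yml.
CREATE TABLE IF NOT EXISTS nessie.default.employees (
    id   INT,
    name STRING
) USING iceberg;

INSERT INTO nessie.default.employees VALUES (1, 'Alice'), (2, 'Bob');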
Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
+version: 2
+
+sources:
+  - name: test
+    database: nessie
+    schema: nessie
+    tables:
+      - name: default.employees
+        description: "Test table"
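
Because `database` and `schema` are both `nessie` and the table name carries its own `default.` prefix, the `source('test', 'default.employees')` call in the model above should compile to the unquoted identifier `nessie.default.employees`, which Spark reads as catalog.namespace.table. A quick Spark SQL sanity check over the same connection dbt will use, assuming the `nessie` catalog from profiles.yml is active in the session (not part of this commit):

-- Assumes the `nessie` catalog from profiles.yml is registered in the session.
SHOW TABLES IN nessie.default;
SELECT * FROM nessie.default.employees LIMIT 5;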

nessie-stack/jaffle-shop-classic-spark/profiles.yml

Lines changed: 13 additions & 17 deletions
@@ -5,11 +5,10 @@ spark_profile:
       type: spark
       method: thrift
       host: localhost
-      port: 10001 # Default Spark Thrift port, adjust if needed
+      port: 10000 # Default Spark Thrift port, adjust if needed
       user: # leave blank if not using auth
       password: # leave blank if not using auth
-      database: default # or your specific database
-      schema: default # or your specific schema
+      schema: nessie.default # or your specific schema
       threads: 1
       connect_timeout: 60
       connect_retries: 5
@@ -21,21 +20,18 @@ spark_profile:
       connect_args:
         spark.jars.packages: org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.8.1,org.projectnessie.nessie-integrations:nessie-spark-extensions-3.5_2.12:0.102.5,org.apache.hadoop:hadoop-aws:3.3.4
         spark.sql.extensions: org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,org.projectnessie.spark.extensions.NessieSparkSessionExtensions
-        spark.sql.catalog.iceberg: org.apache.iceberg.spark.SparkCatalog
-        spark.sql.catalog.iceberg.type: nessie
-        spark.sql.catalog.iceberg.warehouse: s3a://warehouse
-        spark.sql.catalog.iceberg.uri: http://nessie:19120/api/v1
-        spark.sql.catalog.iceberg.ref: main
-        spark.sql.catalog.iceberg.io-impl: org.apache.iceberg.aws.s3.S3FileIO
-        spark.sql.catalog.iceberg.s3.endpoint: http://minio:9000
-        spark.sql.catalog.iceberg.s3.path-style-access: true
-        spark.sql.catalog.iceberg.s3.impl: org.apache.hadoop.fs.s3a.S3AFileSystem
-        spark.sql.catalog.iceberg.s3.access.key: minioadmin
-        spark.sql.catalog.iceberg.s3.secret.key: minioadmin
+        spark.sql.catalog.nessie: org.apache.iceberg.spark.SparkCatalog
+        spark.sql.catalog.nessie.type: nessie
+        spark.sql.catalog.nessie.warehouse: s3a://warehouse
+        spark.sql.catalog.nessie.uri: http://nessie:19120/api/v1
+        spark.sql.catalog.nessie.ref: main
+        spark.sql.catalog.nessie.io-impl: org.apache.iceberg.aws.s3.S3FileIO
+        spark.sql.catalog.nessie.s3.endpoint: http://minio:9000
+        spark.sql.catalog.nessie.s3.path-style-access: true
+        spark.sql.catalog.nessie.s3.impl: org.apache.hadoop.fs.s3a.S3AFileSystem
+        spark.sql.catalog.nessie.s3.access.key: minioadmin
+        spark.sql.catalog.nessie.s3.secret.key: minioadmin
         # Add these new configurations
-        spark.sql.defaultCatalog: iceberg
-        spark.sql.catalog.spark_catalog: org.apache.iceberg.spark.SparkSessionCatalog
-        spark.sql.catalog.spark_catalog.type: nessie
         spark.hadoop.fs.s3a.access.key: minioadmin
         spark.hadoop.fs.s3a.secret.key: minioadmin
         spark.hadoop.fs.s3a.endpoint: http://minio:9000
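
With the catalog prefix renamed from `iceberg` to `nessie` (matching the new `nessie.default` schema in the connection block), it may be worth confirming that the Thrift server on port 10000 actually exposes the renamed catalog before running dbt. A small Spark SQL check, assuming the connect_args above were applied when the server session was created (the queries are a sketch, not part of this commit):

-- Confirm the `nessie` catalog and the source table resolve through the
-- Thrift server that dbt connects to.
SHOW NAMESPACES IN nessie;
DESCRIBE TABLE EXTENDED nessie.default.employees;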
