diff --git a/.github/workflows/drivers-tests.yml b/.github/workflows/drivers-tests.yml
index a125e7edae006..c814f0efceca3 100644
--- a/.github/workflows/drivers-tests.yml
+++ b/.github/workflows/drivers-tests.yml
@@ -204,6 +204,10 @@ jobs:
tests:
runs-on: ubuntu-24.04
+ permissions:
+ id-token: write # Needed for OIDC+AWS
+ contents: read
+
timeout-minutes: 30
needs: [latest-tag-sha, build]
if: (needs['latest-tag-sha'].outputs.sha != github.sha)
@@ -225,6 +229,7 @@ jobs:
snowflake
snowflake-encrypted-pk
snowflake-export-bucket-s3
+ snowflake-export-bucket-s3-via-storage-integration-iam-roles
snowflake-export-bucket-s3-prefix
snowflake-export-bucket-azure
snowflake-export-bucket-azure-prefix
@@ -259,6 +264,7 @@ jobs:
- snowflake
- snowflake-encrypted-pk
- snowflake-export-bucket-s3
+ - snowflake-export-bucket-s3-via-storage-integration-iam-roles
- snowflake-export-bucket-s3-prefix
- snowflake-export-bucket-azure
- snowflake-export-bucket-azure-prefix
@@ -338,6 +344,15 @@ jobs:
gunzip image.tar.gz
docker load -i image.tar
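+ # configure-aws-credentials exports short-lived AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY /
+ # AWS_SESSION_TOKEN env vars; the test harness (getComposePath) writes them into an .aws
+ # profile and mounts it into the Cube and Cube Store containers.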
+ - name: Configure AWS credentials via OIDC
+ uses: aws-actions/configure-aws-credentials@v4
+ with:
+ role-to-assume: ${{ secrets.DRIVERS_TESTS_AWS_ROLE_ARN_FOR_SNOWFLAKE }}
+ aws-region: us-west-1
+ mask-aws-account-id: true
+ if: |
+ env.DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY != '' && matrix.database == 'snowflake-export-bucket-s3-via-storage-integration-iam-roles'
+
- name: Run tests
uses: nick-fields/retry@v3
# It's enough to test for any one secret because they are set all at once or not set all
diff --git a/docs/pages/product/configuration/data-sources/snowflake.mdx b/docs/pages/product/configuration/data-sources/snowflake.mdx
index 4a0d2639bd065..c357294847e7a 100644
--- a/docs/pages/product/configuration/data-sources/snowflake.mdx
+++ b/docs/pages/product/configuration/data-sources/snowflake.mdx
@@ -133,15 +133,13 @@ Storage][google-cloud-storage] for export bucket functionality.
-Ensure the AWS credentials are correctly configured in IAM to allow reads and
-writes to the export bucket in S3 if you are not using storage integration.
-If you are using storage integration then you still need to configure access keys
-for Cube Store to be able to read from the export bucket.
-It's possible to authenticate with IAM roles instead of access keys for Cube Store.
+Ensure the IAM privileges allow reads and writes to the export bucket in S3. Snowflake can
+authenticate with either a storage integration or user credentials, and Cube Store can use
+either an IAM role (e.g. IRSA) or user credentials; mixed configurations are supported.
-Using IAM user credentials:
+Using IAM user credentials for both Snowflake and Cube Store:
```dotenv
CUBEJS_DB_EXPORT_BUCKET_TYPE=s3
@@ -151,8 +149,8 @@ CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET=
CUBEJS_DB_EXPORT_BUCKET_AWS_REGION=
```
-[Using Storage Integration][snowflake-docs-aws-integration] to write to Export Bucket and
-then Access Keys to read from Cube Store:
+Using a [Storage Integration][snowflake-docs-aws-integration] for Snowflake to write to the
+export bucket and user credentials for Cube Store to read from it:
```dotenv
CUBEJS_DB_EXPORT_BUCKET_TYPE=s3
@@ -163,7 +161,8 @@ CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET=
CUBEJS_DB_EXPORT_BUCKET_AWS_REGION=
```
-Using Storage Integration to write to export bocket and IAM role to read from Cube Store:
+Using a Storage Integration for Snowflake to write to the export bucket and an IAM role (e.g. IRSA) for Cube Store to read from it:
+
```dotenv
CUBEJS_DB_EXPORT_BUCKET_TYPE=s3
CUBEJS_DB_EXPORT_BUCKET=my.bucket.on.s3
diff --git a/packages/cubejs-snowflake-driver/package.json b/packages/cubejs-snowflake-driver/package.json
index fac71a32fa46e..a568bf931a91d 100644
--- a/packages/cubejs-snowflake-driver/package.json
+++ b/packages/cubejs-snowflake-driver/package.json
@@ -25,6 +25,7 @@
"lint:fix": "eslint --fix src/* --ext .ts"
},
"dependencies": {
+ "@aws-sdk/client-s3": "^3.726.0",
"@cubejs-backend/base-driver": "1.3.77",
"@cubejs-backend/shared": "1.3.77",
"date-fns-timezone": "^0.1.4",
diff --git a/packages/cubejs-snowflake-driver/src/SnowflakeDriver.ts b/packages/cubejs-snowflake-driver/src/SnowflakeDriver.ts
index 3f8dcc6f329d2..4b2b7df552c0c 100644
--- a/packages/cubejs-snowflake-driver/src/SnowflakeDriver.ts
+++ b/packages/cubejs-snowflake-driver/src/SnowflakeDriver.ts
@@ -23,6 +23,7 @@ import {
import { formatToTimeZone } from 'date-fns-timezone';
import fs from 'fs/promises';
import crypto from 'crypto';
+import { S3ClientConfig } from '@aws-sdk/client-s3';
import { HydrationMap, HydrationStream } from './HydrationStream';
const SUPPORTED_BUCKET_TYPES = ['s3', 'gcs', 'azure'];
@@ -106,8 +107,8 @@ const SnowflakeToGenericType: Record<string, GenericDataBaseType> = {
interface SnowflakeDriverExportAWS {
bucketType: 's3',
bucketName: string,
- keyId: string,
- secretKey: string,
+ keyId?: string,
+ secretKey?: string,
region: string,
integrationName?: string,
}
@@ -328,14 +329,17 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface {
if (bucketType === 's3') {
// integrationName is optional for s3
const integrationName = getEnv('dbExportIntegration', { dataSource });
+ // keyId and secretKey are optional for s3 if IAM role is used
+ const keyId = getEnv('dbExportBucketAwsKey', { dataSource });
+ const secretKey = getEnv('dbExportBucketAwsSecret', { dataSource });
return {
bucketType,
bucketName: getEnv('dbExportBucket', { dataSource }),
- keyId: getEnv('dbExportBucketAwsKey', { dataSource }),
- secretKey: getEnv('dbExportBucketAwsSecret', { dataSource }),
region: getEnv('dbExportBucketAwsRegion', { dataSource }),
...(integrationName !== undefined && { integrationName }),
+ ...(keyId !== undefined && { keyId }),
+ ...(secretKey !== undefined && { secretKey }),
};
}
@@ -387,6 +391,20 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface {
);
}
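+ /**
+  * When the S3 export bucket is configured with a storage integration, keyId and
+  * secretKey are optional (reads fall back to the AWS SDK default credential
+  * chain, e.g. an IAM role / IRSA), so they are excluded from the empty-key check.
+  */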
+ private getRequiredExportBucketKeys(
+ exportBucket: SnowflakeDriverExportBucket,
+ emptyKeys: string[]
+ ): string[] {
+ if (exportBucket.bucketType === 's3') {
+ const s3Config = exportBucket as SnowflakeDriverExportAWS;
+ if (s3Config.integrationName) {
+ return emptyKeys.filter(key => key !== 'keyId' && key !== 'secretKey');
+ }
+ }
+
+ return emptyKeys;
+ }
+
protected getExportBucket(
dataSource: string,
): SnowflakeDriverExportBucket | undefined {
@@ -402,9 +420,11 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface {
const emptyKeys = Object.keys(exportBucket)
.filter((key: string) => exportBucket[key] === undefined);
- if (emptyKeys.length) {
+ const keysToValidate = this.getRequiredExportBucketKeys(exportBucket, emptyKeys);
+
+ if (keysToValidate.length) {
throw new Error(
- `Unsupported configuration exportBucket, some configuration keys are empty: ${emptyKeys.join(',')}`
+ `Unsupported configuration exportBucket, some configuration keys are empty: ${keysToValidate.join(',')}`
);
}
@@ -731,7 +751,7 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface {
// Storage integration export flow takes precedence over direct auth if it is defined
if (conf.integrationName) {
optionsToExport.STORAGE_INTEGRATION = conf.integrationName;
- } else {
+ } else if (conf.keyId && conf.secretKey) {
optionsToExport.CREDENTIALS = `(AWS_KEY_ID = '${conf.keyId}' AWS_SECRET_KEY = '${conf.secretKey}')`;
}
} else if (bucketType === 'gcs') {
@@ -771,14 +791,18 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface {
const { bucketName, path } = this.parseBucketUrl(this.config.exportBucket!.bucketName);
const exportPrefix = path ? `${path}/${tableName}` : tableName;
+ const s3Config: S3ClientConfig = { region };
+ if (keyId && secretKey) {
+ // If access key and secret are provided, use them as credentials
+ // Otherwise, let the SDK use the default credential chain (IRSA, instance profile, etc.)
+ s3Config.credentials = {
+ accessKeyId: keyId,
+ secretAccessKey: secretKey,
+ };
+ }
+
return this.extractUnloadedFilesFromS3(
- {
- credentials: {
- accessKeyId: keyId,
- secretAccessKey: secretKey,
- },
- region,
- },
+ s3Config,
bucketName,
exportPrefix,
);
diff --git a/packages/cubejs-testing-drivers/fixtures/snowflake.json b/packages/cubejs-testing-drivers/fixtures/snowflake.json
index 29d46e844f592..728d6c2a0085b 100644
--- a/packages/cubejs-testing-drivers/fixtures/snowflake.json
+++ b/packages/cubejs-testing-drivers/fixtures/snowflake.json
@@ -22,6 +22,16 @@
}
}
},
+ "export-bucket-s3-via-storage-integration-iam-roles": {
+ "cube": {
+ "environment": {
+ "CUBEJS_DB_EXPORT_BUCKET_TYPE": "s3",
+ "CUBEJS_DB_EXPORT_BUCKET": "snowflake-drivers-tests-preaggs",
+ "CUBEJS_DB_EXPORT_BUCKET_AWS_REGION": "us-west-1",
+ "CUBEJS_DB_EXPORT_INTEGRATION": "DRIVERS_TESTS_PREAGGS_S3"
+ }
+ }
+ },
"export-bucket-azure": {
"cube": {
"environment": {
diff --git a/packages/cubejs-testing-drivers/package.json b/packages/cubejs-testing-drivers/package.json
index e371dc61d59a6..1d5fe806f639c 100644
--- a/packages/cubejs-testing-drivers/package.json
+++ b/packages/cubejs-testing-drivers/package.json
@@ -49,6 +49,7 @@
"snowflake-full": "yarn test-driver -i dist/test/snowflake-full.test.js",
"snowflake-encrypted-pk-full": "yarn test-driver -i dist/test/snowflake-encrypted-pk-full.test.js",
"snowflake-export-bucket-s3-full": "yarn test-driver -i dist/test/snowflake-export-bucket-s3-full.test.js",
+ "snowflake-export-bucket-s3-via-storage-integration-iam-roles-full": "yarn test-driver -i dist/test/snowflake-export-bucket-s3-via-storage-integration-iam-roles-full.test.js",
"snowflake-export-bucket-s3-prefix-full": "yarn test-driver -i dist/test/snowflake-export-bucket-s3-prefix-full.test.js",
"snowflake-export-bucket-azure-full": "yarn test-driver -i dist/test/snowflake-export-bucket-azure-full.test.js",
"snowflake-export-bucket-azure-prefix-full": "yarn test-driver -i dist/test/snowflake-export-bucket-azure-prefix-full.test.js",
@@ -59,7 +60,7 @@
"redshift-core": "yarn test-driver -i dist/test/redshift-core.test.js",
"redshift-full": "yarn test-driver -i dist/test/redshift-full.test.js",
"redshift-export-bucket-s3-full": "yarn test-driver -i dist/test/redshift-export-bucket-s3-full.test.js",
- "update-all-snapshots-local": "yarn run athena-export-bucket-s3-full --mode=local -u; yarn run bigquery-export-bucket-gcs-full --mode=local -u; yarn run clickhouse-full --mode=local -u; yarn run clickhouse-export-bucket-s3-full --mode=local -u; yarn run clickhouse-export-bucket-s3-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-azure-full --mode=local -u; yarn run databricks-jdbc-export-bucket-azure-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-gcs-full --mode=local -u; yarn run databricks-jdbc-export-bucket-gcs-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-s3-full --mode=local -u; yarn run databricks-jdbc-export-bucket-s3-prefix-full --mode=local -u; yarn run databricks-jdbc-full --mode=local -u; yarn run mssql-full --mode=local -u; yarn run mysql-full --mode=local -u; yarn run postgres-full --mode=local -u; yarn run redshift-export-bucket-s3-full --mode=local -u; yarn run redshift-full --mode=local -u; yarn run snowflake-encrypted-pk-full --mode=local -u; yarn run snowflake-export-bucket-azure-full --mode=local -u; yarn run snowflake-export-bucket-azure-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-via-storage-integration-full --mode=local -u; yarn run snowflake-export-bucket-gcs-full --mode=local -u; yarn run snowflake-export-bucket-gcs-prefix-full --mode=local -u; yarn run snowflake-export-bucket-s3-full --mode=local -u; yarn run snowflake-export-bucket-s3-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-full --mode=local -u; yarn run snowflake-full --mode=local -u",
+ "update-all-snapshots-local": "yarn run athena-export-bucket-s3-full --mode=local -u; yarn run bigquery-export-bucket-gcs-full --mode=local -u; yarn run clickhouse-full --mode=local -u; yarn run clickhouse-export-bucket-s3-full --mode=local -u; yarn run clickhouse-export-bucket-s3-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-azure-full --mode=local -u; yarn run databricks-jdbc-export-bucket-azure-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-gcs-full --mode=local -u; yarn run databricks-jdbc-export-bucket-gcs-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-s3-full --mode=local -u; yarn run databricks-jdbc-export-bucket-s3-prefix-full --mode=local -u; yarn run databricks-jdbc-full --mode=local -u; yarn run mssql-full --mode=local -u; yarn run mysql-full --mode=local -u; yarn run postgres-full --mode=local -u; yarn run redshift-export-bucket-s3-full --mode=local -u; yarn run redshift-full --mode=local -u; yarn run snowflake-encrypted-pk-full --mode=local -u; yarn run snowflake-export-bucket-azure-full --mode=local -u; yarn run snowflake-export-bucket-azure-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-via-storage-integration-full --mode=local -u; yarn run snowflake-export-bucket-gcs-full --mode=local -u; yarn run snowflake-export-bucket-gcs-prefix-full --mode=local -u; yarn run snowflake-export-bucket-s3-full --mode=local -u; yarn run snowflake-export-bucket-s3-via-storage-integration-iam-roles-full --mode=local -u; yarn run snowflake-export-bucket-s3-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-full --mode=local -u; yarn run snowflake-full --mode=local -u",
"tst": "clear && yarn tsc && yarn bigquery-core"
},
"files": [
diff --git a/packages/cubejs-testing-drivers/src/helpers/getComposePath.ts b/packages/cubejs-testing-drivers/src/helpers/getComposePath.ts
index 5c0f895100251..42b2012b41b5c 100644
--- a/packages/cubejs-testing-drivers/src/helpers/getComposePath.ts
+++ b/packages/cubejs-testing-drivers/src/helpers/getComposePath.ts
@@ -27,6 +27,27 @@ export function getComposePath(type: string, fixture: Fixture, isLocal: boolean)
'./package.json:/cube/conf/package.json',
'./model/ecommerce.yaml:/cube/conf/model/ecommerce.yaml',
];
+
+ // Add AWS credential mounting for IRSA-enabled tests
+ if (process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY && process.env.AWS_SESSION_TOKEN) {
+ const awsCredentialsDir = path.resolve(_path, '.aws');
+ fs.ensureDirSync(awsCredentialsDir);
+
+ const credentialsContent = `[default]
+aws_access_key_id = ${process.env.AWS_ACCESS_KEY_ID}
+aws_secret_access_key = ${process.env.AWS_SECRET_ACCESS_KEY}
+aws_session_token = ${process.env.AWS_SESSION_TOKEN}
+`;
+
+ const configContent = `[default]
+region = ${process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION || 'us-west-1'}
+`;
+
+ fs.writeFileSync(path.resolve(awsCredentialsDir, 'credentials'), credentialsContent);
+ fs.writeFileSync(path.resolve(awsCredentialsDir, 'config'), configContent);
+
+ volumes.push('./.aws:/root/.aws:ro');
+ }
const compose: any = {
version: '2.2',
services: {
@@ -46,6 +67,9 @@ export function getComposePath(type: string, fixture: Fixture, isLocal: boolean)
image: `cubejs/cubestore:${process.arch === 'arm64' ? 'arm64v8' : 'latest'}`,
ports: ['3030'],
restart: 'always',
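+ // Give the Cube Store container the same read-only .aws mount so it can resolve
+ // credentials from the default profile instead of explicit access keys.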
+ ...(process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY && process.env.AWS_SESSION_TOKEN ? {
+ volumes: ['./.aws:/root/.aws:ro']
+ } : {})
}
}
};
diff --git a/packages/cubejs-testing-drivers/test/snowflake-export-bucket-s3-via-storage-integration-iam-roles-full.test.ts b/packages/cubejs-testing-drivers/test/snowflake-export-bucket-s3-via-storage-integration-iam-roles-full.test.ts
new file mode 100644
index 0000000000000..5e3d3a3e76d02
--- /dev/null
+++ b/packages/cubejs-testing-drivers/test/snowflake-export-bucket-s3-via-storage-integration-iam-roles-full.test.ts
@@ -0,0 +1,9 @@
+import { testQueries } from '../src/tests/testQueries';
+
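+// Exercises the storage-integration + IAM-role export path: Snowflake unloads via
+// CUBEJS_DB_EXPORT_INTEGRATION and the bucket is read without explicit AWS access keys.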
+testQueries('snowflake', {
+ // NOTICE: It's enough to turn on this flag only once for any one
+ // cloud storage integration. Please do not turn it on for every integration test!
+ includeIncrementalSchemaSuite: false,
+ includeHLLSuite: false,
+ extendedEnv: 'export-bucket-s3-via-storage-integration-iam-roles'
+});