2 changes: 2 additions & 0 deletions .github/workflows/drivers-tests.yml
@@ -215,6 +215,7 @@ jobs:
             athena-export-bucket-s3
             bigquery-export-bucket-gcs
             clickhouse-export-bucket-s3
+            clickhouse-export-bucket-s3-prefix
             databricks-jdbc
             databricks-jdbc-export-bucket-s3
             databricks-jdbc-export-bucket-s3-prefix
@@ -242,6 +243,7 @@ jobs:
           - bigquery-export-bucket-gcs
           - clickhouse
           - clickhouse-export-bucket-s3
+          - clickhouse-export-bucket-s3-prefix
           - databricks-jdbc
           - databricks-jdbc-export-bucket-s3
           - databricks-jdbc-export-bucket-s3-prefix
39 changes: 25 additions & 14 deletions packages/cubejs-clickhouse-driver/src/ClickHouseDriver.ts
@@ -33,6 +33,8 @@ import sqlstring from 'sqlstring';
 
 import { transformRow, transformStreamRow } from './HydrationStream';
 
+const SUPPORTED_BUCKET_TYPES = ['s3'];
+
 const ClickhouseTypeToGeneric: Record<string, string> = {
   enum: 'text',
   string: 'text',
@@ -489,11 +491,9 @@ export class ClickHouseDriver extends BaseDriver implements DriverInterface {
   protected getExportBucket(
     dataSource: string,
   ): ClickhouseDriverExportAWS | null {
-    const supportedBucketTypes = ['s3'];
-
     const requiredExportBucket: ClickhouseDriverExportRequiredAWS = {
       bucketType: getEnv('dbExportBucketType', {
-        supported: supportedBucketTypes,
+        supported: SUPPORTED_BUCKET_TYPES,
         dataSource,
       }),
       bucketName: getEnv('dbExportBucket', { dataSource }),
@@ -507,9 +507,9 @@ export class ClickHouseDriver extends BaseDriver implements DriverInterface {
     };
 
     if (exportBucket.bucketType) {
-      if (!supportedBucketTypes.includes(exportBucket.bucketType)) {
+      if (!SUPPORTED_BUCKET_TYPES.includes(exportBucket.bucketType)) {
         throw new Error(
-          `Unsupported EXPORT_BUCKET_TYPE, supported: ${supportedBucketTypes.join(',')}`
+          `Unsupported EXPORT_BUCKET_TYPE, supported: ${SUPPORTED_BUCKET_TYPES.join(',')}`
         );
       }
 
@@ -529,11 +529,7 @@ export class ClickHouseDriver extends BaseDriver implements DriverInterface {
   }
 
   public async isUnloadSupported() {
-    if (this.config.exportBucket) {
-      return true;
-    }
-
-    return false;
+    return !!this.config.exportBucket;
   }
 
   /**
@@ -588,18 +584,33 @@ export class ClickHouseDriver extends BaseDriver implements DriverInterface {
     );
   }
 
-  public async unloadFromQuery(sql: string, params: unknown[], options: UnloadOptions): Promise<DownloadTableCSVData> {
+  /**
+   * Returns clean S3 bucket name and prefix path ending with / (if set)
+   */
+  private parseS3Path(input: string): { bucket: string; prefix: string | null } {
+    let trimmed = input.startsWith('s3://') ? input.slice(5) : input;
+    trimmed = trimmed.endsWith('/') ? trimmed.slice(0, -1) : trimmed;
+    const parts = trimmed.split('/');
+    const bucket = parts[0];
+    const prefixParts = parts.slice(1);
+    const prefix = prefixParts.length > 0 ? `${prefixParts.join('/')}/` : null;
+
+    return { bucket, prefix };
+  }
+
+  public async unloadFromQuery(sql: string, params: unknown[], _options: UnloadOptions): Promise<DownloadTableCSVData> {
     if (!this.config.exportBucket) {
       throw new Error('Unload is not configured');
     }
 
     const types = await this.queryColumnTypes(`(${sql})`, params);
-    const exportPrefix = uuidv4();
+    const { bucket, prefix } = this.parseS3Path(this.config.exportBucket.bucketName);
+    const exportPrefix = prefix ? `${prefix}${uuidv4()}` : uuidv4();
 
     const formattedQuery = sqlstring.format(`
       INSERT INTO FUNCTION
       s3(
-        'https://${this.config.exportBucket.bucketName}.s3.${this.config.exportBucket.region}.amazonaws.com/${exportPrefix}/export.csv.gz',
+        'https://${bucket}.s3.${this.config.exportBucket.region}.amazonaws.com/${exportPrefix}/export.csv.gz',
         '${this.config.exportBucket.keyId}',
         '${this.config.exportBucket.secretKey}',
         'CSV'
@@ -617,7 +628,7 @@ export class ClickHouseDriver extends BaseDriver implements DriverInterface {
       },
       region: this.config.exportBucket.region,
     },
-    this.config.exportBucket.bucketName,
+    bucket,
     exportPrefix,
   );
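
For context, a minimal standalone sketch of the new prefix handling (this mirrors the private parseS3Path method above; the bucket name, prefix, region, and uuid shown are illustrative only):

// Mirror of the driver's private parseS3Path, extracted for illustration.
// It accepts a bare bucket name, an s3:// URL, or a bucket-plus-path string,
// and splits it into a clean bucket name plus an optional key prefix that
// always ends with "/".
function parseS3Path(input: string): { bucket: string; prefix: string | null } {
  let trimmed = input.startsWith('s3://') ? input.slice(5) : input;
  trimmed = trimmed.endsWith('/') ? trimmed.slice(0, -1) : trimmed;
  const parts = trimmed.split('/');
  const bucket = parts[0];
  const prefixParts = parts.slice(1);
  const prefix = prefixParts.length > 0 ? `${prefixParts.join('/')}/` : null;
  return { bucket, prefix };
}

// A plain bucket name yields no prefix, so existing setups are unchanged:
console.log(parseS3Path('my-bucket'));
// => { bucket: 'my-bucket', prefix: null }

// A path-style setting (with or without s3:// or a trailing slash) is split:
console.log(parseS3Path('s3://my-bucket/testing_prefix/for_export_buckets/'));
// => { bucket: 'my-bucket', prefix: 'testing_prefix/for_export_buckets/' }

// unloadFromQuery then writes to `${prefix}${uuid}` under the clean bucket,
// producing an object URL of the shape:
// https://my-bucket.s3.us-east-1.amazonaws.com/testing_prefix/for_export_buckets/<uuid>/export.csv.gz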
11 changes: 11 additions & 0 deletions packages/cubejs-testing-drivers/fixtures/clickhouse.json
@@ -10,6 +10,17 @@
"CUBEJS_DB_EXPORT_BUCKET_AWS_REGION": "us-east-1"
}
}
},
"export-bucket-s3-prefix": {
"cube": {
"environment": {
"CUBEJS_DB_EXPORT_BUCKET_TYPE": "s3",
"CUBEJS_DB_EXPORT_BUCKET": "clickhouse-drivers-tests-preaggs/testing_prefix/for_export_buckets/",
"CUBEJS_DB_EXPORT_BUCKET_AWS_KEY": "${DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_KEY}",
"CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET": "${DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET}",
"CUBEJS_DB_EXPORT_BUCKET_AWS_REGION": "us-east-1"
}
}
}
},
"cube": {
Expand Down
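
With this fixture, CUBEJS_DB_EXPORT_BUCKET carries an embedded key prefix; under the parsing above it would split as follows (input taken from the fixture, result inferred from parseS3Path):

// parseS3Path('clickhouse-drivers-tests-preaggs/testing_prefix/for_export_buckets/')
// => { bucket: 'clickhouse-drivers-tests-preaggs',
//      prefix: 'testing_prefix/for_export_buckets/' }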
1 change: 1 addition & 0 deletions packages/cubejs-testing-drivers/package.json
@@ -25,6 +25,7 @@
     "clickhouse-core": "yarn test-driver -i dist/test/clickhouse-core.test.js",
     "clickhouse-full": "yarn test-driver -i dist/test/clickhouse-full.test.js",
     "clickhouse-export-bucket-s3-full": "yarn test-driver -i dist/test/clickhouse-export-bucket-s3-full.test.js",
+    "clickhouse-export-bucket-s3-prefix-full": "yarn test-driver -i dist/test/clickhouse-export-bucket-s3-prefix-full.test.js",
     "databricks-jdbc-driver": "yarn test-driver -i dist/test/databricks-jdbc-driver.test.js",
     "databricks-jdbc-core": "yarn test-driver -i dist/test/databricks-jdbc-core.test.js",
     "databricks-jdbc-full": "yarn test-driver -i dist/test/databricks-jdbc-full.test.js",
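
The new script follows the pattern of its siblings, so it should be runnable from packages/cubejs-testing-drivers as yarn clickhouse-export-bucket-s3-prefix-full, which executes dist/test/clickhouse-export-bucket-s3-prefix-full.test.js against the prefixed fixture above.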