Skip to content

Commit a145482

Browse files
KSDaemon authored and marianore-muttdata committed
Revert(databricks-jdbc-driver): Switch to the latest OSS Databricks JDBC driver (cube-js#9420)
This reverts commit 29fdd61.
1 parent b1aedfa commit a145482

File tree

8 files changed

+116
-58
lines changed

8 files changed

+116
-58
lines changed

packages/cubejs-backend-shared/src/env.ts

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -958,15 +958,9 @@ const variables: Record<string, (...args: any) => any> = {
958958
* Accept Databricks policy flag. This environment variable doesn't
959959
* need to be split by the data source.
960960
*/
961-
databrickAcceptPolicy: () => {
962-
const val = get('CUBEJS_DB_DATABRICKS_ACCEPT_POLICY').asBoolStrict();
963-
964-
if (val !== undefined) {
965-
console.warn(
966-
'The CUBEJS_DB_DATABRICKS_ACCEPT_POLICY is not needed anymore. Please, remove it'
967-
);
968-
}
969-
},
961+
databrickAcceptPolicy: () => (
962+
get('CUBEJS_DB_DATABRICKS_ACCEPT_POLICY').asBoolStrict()
963+
),
970964

971965
/**
972966
* Databricks jdbc-connection url.

packages/cubejs-backend-shared/src/http-utils.ts

Lines changed: 2 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -66,11 +66,9 @@ export async function streamWithProgress(
6666
type DownloadAndExtractFile = {
6767
showProgress: boolean;
6868
cwd: string;
69-
noExtract?: boolean;
70-
dstFileName?: string;
7169
};
7270

73-
export async function downloadAndExtractFile(url: string, { cwd, noExtract, dstFileName }: DownloadAndExtractFile) {
71+
export async function downloadAndExtractFile(url: string, { cwd }: DownloadAndExtractFile) {
7472
const request = new Request(url, {
7573
headers: new Headers({
7674
'Content-Type': 'application/octet-stream',
@@ -101,15 +99,7 @@ export async function downloadAndExtractFile(url: string, { cwd, noExtract, dstF
10199
});
102100
});
103101

104-
if (noExtract) {
105-
if (dstFileName) {
106-
fs.copyFileSync(savedFilePath, path.resolve(path.join(cwd, dstFileName)));
107-
} else {
108-
fs.copyFileSync(savedFilePath, cwd);
109-
}
110-
} else {
111-
await decompress(savedFilePath, cwd);
112-
}
102+
await decompress(savedFilePath, cwd);
113103

114104
try {
115105
fs.unlinkSync(savedFilePath);

packages/cubejs-backend-shared/test/db_env_multi.test.ts

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1192,6 +1192,34 @@ describe('Multiple datasources', () => {
11921192
);
11931193
});
11941194

1195+
test('getEnv("databrickAcceptPolicy")', () => {
1196+
process.env.CUBEJS_DB_DATABRICKS_ACCEPT_POLICY = 'true';
1197+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'default' })).toEqual(true);
1198+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'postgres' })).toEqual(true);
1199+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'wrong' })).toEqual(true);
1200+
1201+
process.env.CUBEJS_DB_DATABRICKS_ACCEPT_POLICY = 'false';
1202+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'default' })).toEqual(false);
1203+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'postgres' })).toEqual(false);
1204+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'wrong' })).toEqual(false);
1205+
1206+
process.env.CUBEJS_DB_DATABRICKS_ACCEPT_POLICY = 'wrong';
1207+
expect(() => getEnv('databrickAcceptPolicy', { dataSource: 'default' })).toThrow(
1208+
'env-var: "CUBEJS_DB_DATABRICKS_ACCEPT_POLICY" should be either "true", "false", "TRUE", or "FALSE"'
1209+
);
1210+
expect(() => getEnv('databrickAcceptPolicy', { dataSource: 'postgres' })).toThrow(
1211+
'env-var: "CUBEJS_DB_DATABRICKS_ACCEPT_POLICY" should be either "true", "false", "TRUE", or "FALSE"'
1212+
);
1213+
expect(() => getEnv('databrickAcceptPolicy', { dataSource: 'wrong' })).toThrow(
1214+
'env-var: "CUBEJS_DB_DATABRICKS_ACCEPT_POLICY" should be either "true", "false", "TRUE", or "FALSE"'
1215+
);
1216+
1217+
delete process.env.CUBEJS_DB_DATABRICKS_ACCEPT_POLICY;
1218+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'default' })).toBeUndefined();
1219+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'postgres' })).toBeUndefined();
1220+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'wrong' })).toBeUndefined();
1221+
});
1222+
11951223
test('getEnv("athenaAwsKey")', () => {
11961224
process.env.CUBEJS_AWS_KEY = 'default1';
11971225
process.env.CUBEJS_DS_POSTGRES_AWS_KEY = 'postgres1';

packages/cubejs-backend-shared/test/db_env_single.test.ts

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -760,6 +760,34 @@ describe('Single datasources', () => {
760760
expect(getEnv('databricksCatalog', { dataSource: 'wrong' })).toBeUndefined();
761761
});
762762

763+
test('getEnv("databrickAcceptPolicy")', () => {
764+
process.env.CUBEJS_DB_DATABRICKS_ACCEPT_POLICY = 'true';
765+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'default' })).toEqual(true);
766+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'postgres' })).toEqual(true);
767+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'wrong' })).toEqual(true);
768+
769+
process.env.CUBEJS_DB_DATABRICKS_ACCEPT_POLICY = 'false';
770+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'default' })).toEqual(false);
771+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'postgres' })).toEqual(false);
772+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'wrong' })).toEqual(false);
773+
774+
process.env.CUBEJS_DB_DATABRICKS_ACCEPT_POLICY = 'wrong';
775+
expect(() => getEnv('databrickAcceptPolicy', { dataSource: 'default' })).toThrow(
776+
'env-var: "CUBEJS_DB_DATABRICKS_ACCEPT_POLICY" should be either "true", "false", "TRUE", or "FALSE"'
777+
);
778+
expect(() => getEnv('databrickAcceptPolicy', { dataSource: 'postgres' })).toThrow(
779+
'env-var: "CUBEJS_DB_DATABRICKS_ACCEPT_POLICY" should be either "true", "false", "TRUE", or "FALSE"'
780+
);
781+
expect(() => getEnv('databrickAcceptPolicy', { dataSource: 'wrong' })).toThrow(
782+
'env-var: "CUBEJS_DB_DATABRICKS_ACCEPT_POLICY" should be either "true", "false", "TRUE", or "FALSE"'
783+
);
784+
785+
delete process.env.CUBEJS_DB_DATABRICKS_ACCEPT_POLICY;
786+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'default' })).toBeUndefined();
787+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'postgres' })).toBeUndefined();
788+
expect(getEnv('databrickAcceptPolicy', { dataSource: 'wrong' })).toBeUndefined();
789+
});
790+
763791
test('getEnv("athenaAwsKey")', () => {
764792
process.env.CUBEJS_AWS_KEY = 'default1';
765793
expect(getEnv('athenaAwsKey', { dataSource: 'default' })).toEqual('default1');

packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts

Lines changed: 15 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -4,21 +4,27 @@
44
* @fileoverview The `DatabricksDriver` and related types declaration.
55
*/
66

7-
import { assertDataSource, getEnv, } from '@cubejs-backend/shared';
87
import {
9-
DatabaseStructure,
8+
getEnv,
9+
assertDataSource,
10+
} from '@cubejs-backend/shared';
11+
import {
1012
DriverCapabilities,
11-
GenericDataBaseType,
1213
QueryColumnsResult,
1314
QueryOptions,
1415
QuerySchemasResult,
1516
QueryTablesResult,
16-
TableColumn,
1717
UnloadOptions,
18+
GenericDataBaseType,
19+
TableColumn,
20+
DatabaseStructure,
1821
} from '@cubejs-backend/base-driver';
19-
import { JDBCDriver, JDBCDriverConfiguration, } from '@cubejs-backend/jdbc-driver';
22+
import {
23+
JDBCDriver,
24+
JDBCDriverConfiguration,
25+
} from '@cubejs-backend/jdbc-driver';
2026
import { DatabricksQuery } from './DatabricksQuery';
21-
import { extractUidFromJdbcUrl, resolveJDBCDriver } from './helpers';
27+
import { resolveJDBCDriver, extractUidFromJdbcUrl } from './helpers';
2228

2329
export type DatabricksDriverConfiguration = JDBCDriverConfiguration &
2430
{
@@ -126,7 +132,7 @@ export class DatabricksDriver extends JDBCDriver {
126132
/**
127133
* Show warning message flag.
128134
*/
129-
private readonly showSparkProtocolWarn: boolean;
135+
private showSparkProtocolWarn: boolean;
130136

131137
/**
132138
* Driver Configuration.
@@ -423,7 +429,8 @@ export class DatabricksDriver extends JDBCDriver {
423429
metadata[database] = {};
424430
}
425431

426-
metadata[database][tableName] = await this.tableColumnTypes(`${database}.${tableName}`);
432+
const columns = await this.tableColumnTypes(`${database}.${tableName}`);
433+
metadata[database][tableName] = columns;
427434
}));
428435

429436
return metadata;

packages/cubejs-databricks-jdbc-driver/src/helpers.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import fs from 'fs';
22
import path from 'path';
33

4-
import { downloadJDBCDriver, OSS_DRIVER_VERSION } from './installer';
4+
import { downloadJDBCDriver } from './installer';
55

66
async function fileExistsOr(
77
fsPath: string,
@@ -15,16 +15,16 @@ async function fileExistsOr(
1515

1616
export async function resolveJDBCDriver(): Promise<string> {
1717
return fileExistsOr(
18-
path.join(process.cwd(), `databricks-jdbc-${OSS_DRIVER_VERSION}-oss.jar`),
18+
path.join(process.cwd(), 'DatabricksJDBC42.jar'),
1919
async () => fileExistsOr(
20-
path.join(__dirname, '..', 'download', `databricks-jdbc-${OSS_DRIVER_VERSION}-oss.jar`),
20+
path.join(__dirname, '..', 'download', 'DatabricksJDBC42.jar'),
2121
async () => {
2222
const pathOrNull = await downloadJDBCDriver();
2323
if (pathOrNull) {
2424
return pathOrNull;
2525
}
2626
throw new Error(
27-
`Please download and place databricks-jdbc-${OSS_DRIVER_VERSION}-oss.jar inside your ` +
27+
'Please download and place DatabricksJDBC42.jar inside your ' +
2828
'project directory'
2929
);
3030
}
Lines changed: 29 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -1,32 +1,38 @@
11
import path from 'path';
22
import { downloadAndExtractFile, getEnv } from '@cubejs-backend/shared';
33

4-
export const OSS_DRIVER_VERSION = '1.0.2';
5-
6-
/**
7-
* In the beginning of 2025 Databricks released their open-source version of JDBC driver and encourage
8-
* all users to migrate to it as company plans to focus on improving and evolving it over legacy simba driver.
9-
* More info about OSS Driver could be found at https://docs.databricks.com/aws/en/integrations/jdbc/oss
10-
* As of March 2025 To use the Databricks JDBC Driver (OSS), the following requirements must be met:
11-
* Java Runtime Environment (JRE) 11.0 or above. CI testing is supported on JRE 11, 17, and 21.
12-
*/
4+
function acceptedByEnv() {
5+
const acceptStatus = getEnv('databrickAcceptPolicy');
6+
if (acceptStatus) {
7+
console.log('You accepted Terms & Conditions for JDBC driver from DataBricks by CUBEJS_DB_DATABRICKS_ACCEPT_POLICY');
8+
}
9+
10+
if (acceptStatus === false) {
11+
console.log('You declined Terms & Conditions for JDBC driver from DataBricks by CUBEJS_DB_DATABRICKS_ACCEPT_POLICY');
12+
console.log('Installation will be skipped');
13+
}
14+
15+
return acceptStatus;
16+
}
17+
1318
export async function downloadJDBCDriver(): Promise<string | null> {
14-
// TODO: Just to throw a console warning that this ENV is obsolete and could be safely removed
15-
getEnv('databrickAcceptPolicy');
19+
const driverAccepted = acceptedByEnv();
20+
21+
if (driverAccepted) {
22+
console.log('Downloading DatabricksJDBC42-2.6.29.1051');
1623

17-
console.log(`Downloading databricks-jdbc-${OSS_DRIVER_VERSION}-oss.jar`);
24+
await downloadAndExtractFile(
25+
'https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/jdbc/2.6.29/DatabricksJDBC42-2.6.29.1051.zip',
26+
{
27+
showProgress: true,
28+
cwd: path.resolve(path.join(__dirname, '..', 'download')),
29+
}
30+
);
1831

19-
await downloadAndExtractFile(
20-
`https://repo1.maven.org/maven2/com/databricks/databricks-jdbc/${OSS_DRIVER_VERSION}-oss/databricks-jdbc-${OSS_DRIVER_VERSION}-oss.jar`,
21-
{
22-
showProgress: true,
23-
cwd: path.resolve(path.join(__dirname, '..', 'download')),
24-
noExtract: true,
25-
dstFileName: `databricks-jdbc-${OSS_DRIVER_VERSION}-oss.jar`,
26-
}
27-
);
32+
console.log('Release notes: https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/jdbc/2.6.29/docs/release-notes.txt');
2833

29-
console.log(`Release notes: https://mvnrepository.com/artifact/com.databricks/databricks-jdbc/${OSS_DRIVER_VERSION}-oss`);
34+
return path.resolve(path.join(__dirname, '..', 'download', 'DatabricksJDBC42.jar'));
35+
}
3036

31-
return path.resolve(path.join(__dirname, '..', 'download', `databricks-jdbc-${OSS_DRIVER_VERSION}-oss.jar`));
37+
return null;
3238
}

packages/cubejs-databricks-jdbc-driver/src/post-install.ts

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,16 @@
11
import 'source-map-support/register';
22

33
import { displayCLIError } from '@cubejs-backend/shared';
4-
import { resolveJDBCDriver } from './helpers';
4+
5+
import fs from 'fs';
6+
import path from 'path';
7+
import { downloadJDBCDriver } from './installer';
58

69
(async () => {
710
try {
8-
await resolveJDBCDriver();
11+
if (!fs.existsSync(path.join(__dirname, '..', 'download', 'SparkJDBC42.jar'))) {
12+
await downloadJDBCDriver();
13+
}
914
} catch (e: any) {
1015
await displayCLIError(e, 'Cube.js Databricks JDBC Installer');
1116
}

0 commit comments

Comments (0)