From 307e8019b602892914a729618179ea843db655a3 Mon Sep 17 00:00:00 2001 From: Konstantin Burkalev Date: Tue, 3 Jun 2025 14:24:02 +0300 Subject: [PATCH 1/3] remove databrickAcceptPolicy env --- packages/cubejs-backend-shared/src/env.ts | 15 --------------- .../src/installer.ts | 3 --- 2 files changed, 18 deletions(-) diff --git a/packages/cubejs-backend-shared/src/env.ts b/packages/cubejs-backend-shared/src/env.ts index c7b0c2e3bd584..bd6307dd1a48f 100644 --- a/packages/cubejs-backend-shared/src/env.ts +++ b/packages/cubejs-backend-shared/src/env.ts @@ -951,21 +951,6 @@ const variables: Record any> = { * Databricks Driver * ***************************************************************** */ - /** - * Accept Databricks policy flag. This environment variable doesn't - * need to be split by the data source. - * TODO: Tech-debt: Remove totally someday - */ - databrickAcceptPolicy: () => { - const val = get('CUBEJS_DB_DATABRICKS_ACCEPT_POLICY').asBoolStrict(); - - if (val !== undefined) { - console.warn( - 'The CUBEJS_DB_DATABRICKS_ACCEPT_POLICY is not needed anymore. Please, remove it' - ); - } - }, - /** * Databricks jdbc-connection url. */ diff --git a/packages/cubejs-databricks-jdbc-driver/src/installer.ts b/packages/cubejs-databricks-jdbc-driver/src/installer.ts index fce2a28330468..2294d2e5e3cff 100644 --- a/packages/cubejs-databricks-jdbc-driver/src/installer.ts +++ b/packages/cubejs-databricks-jdbc-driver/src/installer.ts @@ -11,9 +11,6 @@ export const OSS_DRIVER_VERSION = '1.0.2'; * Java Runtime Environment (JRE) 11.0 or above. CI testing is supported on JRE 11, 17, and 21. 
*/ export async function downloadJDBCDriver(): Promise { - // TODO: Just to throw a console warning that this ENV is obsolete and could be safely removed - getEnv('databrickAcceptPolicy'); - console.log(`Downloading databricks-jdbc-${OSS_DRIVER_VERSION}-oss.jar`); await downloadAndExtractFile( From 3be8a2299aa57f69c060e9147b6c6b2bf10107b2 Mon Sep 17 00:00:00 2001 From: Konstantin Burkalev Date: Tue, 3 Jun 2025 14:28:45 +0300 Subject: [PATCH 2/3] fix databricks env var names --- packages/cubejs-backend-shared/src/env.ts | 4 +-- .../test/db_env_multi.test.ts | 36 +++++++++---------- .../test/db_env_single.test.ts | 36 +++++++++---------- .../src/DatabricksDriver.ts | 4 +-- 4 files changed, 40 insertions(+), 40 deletions(-) diff --git a/packages/cubejs-backend-shared/src/env.ts b/packages/cubejs-backend-shared/src/env.ts index bd6307dd1a48f..80f9da3504818 100644 --- a/packages/cubejs-backend-shared/src/env.ts +++ b/packages/cubejs-backend-shared/src/env.ts @@ -954,7 +954,7 @@ const variables: Record any> = { /** * Databricks jdbc-connection url. */ - databrickUrl: ({ + databricksUrl: ({ dataSource, }: { dataSource: string, @@ -975,7 +975,7 @@ const variables: Record any> = { /** * Databricks jdbc-connection token. 
*/ - databrickToken: ({ + databricksToken: ({ dataSource, }: { dataSource: string, diff --git a/packages/cubejs-backend-shared/test/db_env_multi.test.ts b/packages/cubejs-backend-shared/test/db_env_multi.test.ts index 7219d039e422a..c93b0fa4329fb 100644 --- a/packages/cubejs-backend-shared/test/db_env_multi.test.ts +++ b/packages/cubejs-backend-shared/test/db_env_multi.test.ts @@ -1105,31 +1105,31 @@ describe('Multiple datasources', () => { process.env.CUBEJS_DB_DATABRICKS_URL = 'default1'; process.env.CUBEJS_DS_POSTGRES_DB_DATABRICKS_URL = 'postgres1'; process.env.CUBEJS_DS_WRONG_DB_DATABRICKS_URL = 'wrong1'; - expect(getEnv('databrickUrl', { dataSource: 'default' })).toEqual('default1'); - expect(getEnv('databrickUrl', { dataSource: 'postgres' })).toEqual('postgres1'); - expect(() => getEnv('databrickUrl', { dataSource: 'wrong' })).toThrow( + expect(getEnv('databricksUrl', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('databricksUrl', { dataSource: 'postgres' })).toEqual('postgres1'); + expect(() => getEnv('databricksUrl', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' ); process.env.CUBEJS_DB_DATABRICKS_URL = 'default2'; process.env.CUBEJS_DS_POSTGRES_DB_DATABRICKS_URL = 'postgres2'; process.env.CUBEJS_DS_WRONG_DB_DATABRICKS_URL = 'wrong2'; - expect(getEnv('databrickUrl', { dataSource: 'default' })).toEqual('default2'); - expect(getEnv('databrickUrl', { dataSource: 'postgres' })).toEqual('postgres2'); - expect(() => getEnv('databrickUrl', { dataSource: 'wrong' })).toThrow( + expect(getEnv('databricksUrl', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('databricksUrl', { dataSource: 'postgres' })).toEqual('postgres2'); + expect(() => getEnv('databricksUrl', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' 
); delete process.env.CUBEJS_DB_DATABRICKS_URL; delete process.env.CUBEJS_DS_POSTGRES_DB_DATABRICKS_URL; delete process.env.CUBEJS_DS_WRONG_DB_DATABRICKS_URL; - expect(() => getEnv('databrickUrl', { dataSource: 'default' })).toThrow( + expect(() => getEnv('databricksUrl', { dataSource: 'default' })).toThrow( 'The CUBEJS_DB_DATABRICKS_URL is required and missing.' ); - expect(() => getEnv('databrickUrl', { dataSource: 'postgres' })).toThrow( + expect(() => getEnv('databricksUrl', { dataSource: 'postgres' })).toThrow( 'The CUBEJS_DS_POSTGRES_DB_DATABRICKS_URL is required and missing.' ); - expect(() => getEnv('databrickUrl', { dataSource: 'wrong' })).toThrow( + expect(() => getEnv('databricksUrl', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' ); }); @@ -1138,27 +1138,27 @@ describe('Multiple datasources', () => { process.env.CUBEJS_DB_DATABRICKS_TOKEN = 'default1'; process.env.CUBEJS_DS_POSTGRES_DB_DATABRICKS_TOKEN = 'postgres1'; process.env.CUBEJS_DS_WRONG_DB_DATABRICKS_TOKEN = 'wrong1'; - expect(getEnv('databrickToken', { dataSource: 'default' })).toEqual('default1'); - expect(getEnv('databrickToken', { dataSource: 'postgres' })).toEqual('postgres1'); - expect(() => getEnv('databrickToken', { dataSource: 'wrong' })).toThrow( + expect(getEnv('databricksToken', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('databricksToken', { dataSource: 'postgres' })).toEqual('postgres1'); + expect(() => getEnv('databricksToken', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' 
); process.env.CUBEJS_DB_DATABRICKS_TOKEN = 'default2'; process.env.CUBEJS_DS_POSTGRES_DB_DATABRICKS_TOKEN = 'postgres2'; process.env.CUBEJS_DS_WRONG_DB_DATABRICKS_TOKEN = 'wrong2'; - expect(getEnv('databrickToken', { dataSource: 'default' })).toEqual('default2'); - expect(getEnv('databrickToken', { dataSource: 'postgres' })).toEqual('postgres2'); - expect(() => getEnv('databrickToken', { dataSource: 'wrong' })).toThrow( + expect(getEnv('databricksToken', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('databricksToken', { dataSource: 'postgres' })).toEqual('postgres2'); + expect(() => getEnv('databricksToken', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' ); delete process.env.CUBEJS_DB_DATABRICKS_TOKEN; delete process.env.CUBEJS_DS_POSTGRES_DB_DATABRICKS_TOKEN; delete process.env.CUBEJS_DS_WRONG_DB_DATABRICKS_TOKEN; - expect(getEnv('databrickToken', { dataSource: 'default' })).toBeUndefined(); - expect(getEnv('databrickToken', { dataSource: 'postgres' })).toBeUndefined(); - expect(() => getEnv('databrickToken', { dataSource: 'wrong' })).toThrow( + expect(getEnv('databricksToken', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('databricksToken', { dataSource: 'postgres' })).toBeUndefined(); + expect(() => getEnv('databricksToken', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' 
); }); diff --git a/packages/cubejs-backend-shared/test/db_env_single.test.ts b/packages/cubejs-backend-shared/test/db_env_single.test.ts index 1dd5612309f32..411aa0eb79558 100644 --- a/packages/cubejs-backend-shared/test/db_env_single.test.ts +++ b/packages/cubejs-backend-shared/test/db_env_single.test.ts @@ -705,42 +705,42 @@ describe('Single datasources', () => { test('getEnv("databrickUrl")', () => { process.env.CUBEJS_DB_DATABRICKS_URL = 'default1'; - expect(getEnv('databrickUrl', { dataSource: 'default' })).toEqual('default1'); - expect(getEnv('databrickUrl', { dataSource: 'postgres' })).toEqual('default1'); - expect(getEnv('databrickUrl', { dataSource: 'wrong' })).toEqual('default1'); + expect(getEnv('databricksUrl', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('databricksUrl', { dataSource: 'postgres' })).toEqual('default1'); + expect(getEnv('databricksUrl', { dataSource: 'wrong' })).toEqual('default1'); process.env.CUBEJS_DB_DATABRICKS_URL = 'default2'; - expect(getEnv('databrickUrl', { dataSource: 'default' })).toEqual('default2'); - expect(getEnv('databrickUrl', { dataSource: 'postgres' })).toEqual('default2'); - expect(getEnv('databrickUrl', { dataSource: 'wrong' })).toEqual('default2'); + expect(getEnv('databricksUrl', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('databricksUrl', { dataSource: 'postgres' })).toEqual('default2'); + expect(getEnv('databricksUrl', { dataSource: 'wrong' })).toEqual('default2'); delete process.env.CUBEJS_DB_DATABRICKS_URL; - expect(() => getEnv('databrickUrl', { dataSource: 'default' })).toThrow( + expect(() => getEnv('databricksUrl', { dataSource: 'default' })).toThrow( 'The CUBEJS_DB_DATABRICKS_URL is required and missing.' ); - expect(() => getEnv('databrickUrl', { dataSource: 'postgres' })).toThrow( + expect(() => getEnv('databricksUrl', { dataSource: 'postgres' })).toThrow( 'The CUBEJS_DB_DATABRICKS_URL is required and missing.' 
); - expect(() => getEnv('databrickUrl', { dataSource: 'wrong' })).toThrow( + expect(() => getEnv('databricksUrl', { dataSource: 'wrong' })).toThrow( 'The CUBEJS_DB_DATABRICKS_URL is required and missing.' ); }); test('getEnv("databrickToken")', () => { process.env.CUBEJS_DB_DATABRICKS_TOKEN = 'default1'; - expect(getEnv('databrickToken', { dataSource: 'default' })).toEqual('default1'); - expect(getEnv('databrickToken', { dataSource: 'postgres' })).toEqual('default1'); - expect(getEnv('databrickToken', { dataSource: 'wrong' })).toEqual('default1'); + expect(getEnv('databricksToken', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('databricksToken', { dataSource: 'postgres' })).toEqual('default1'); + expect(getEnv('databricksToken', { dataSource: 'wrong' })).toEqual('default1'); process.env.CUBEJS_DB_DATABRICKS_TOKEN = 'default2'; - expect(getEnv('databrickToken', { dataSource: 'default' })).toEqual('default2'); - expect(getEnv('databrickToken', { dataSource: 'postgres' })).toEqual('default2'); - expect(getEnv('databrickToken', { dataSource: 'wrong' })).toEqual('default2'); + expect(getEnv('databricksToken', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('databricksToken', { dataSource: 'postgres' })).toEqual('default2'); + expect(getEnv('databricksToken', { dataSource: 'wrong' })).toEqual('default2'); delete process.env.CUBEJS_DB_DATABRICKS_TOKEN; - expect(getEnv('databrickToken', { dataSource: 'default' })).toBeUndefined(); - expect(getEnv('databrickToken', { dataSource: 'postgres' })).toBeUndefined(); - expect(getEnv('databrickToken', { dataSource: 'wrong' })).toBeUndefined(); + expect(getEnv('databricksToken', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('databricksToken', { dataSource: 'postgres' })).toBeUndefined(); + expect(getEnv('databricksToken', { dataSource: 'wrong' })).toBeUndefined(); }); test('getEnv("databricksCatalog")', () => { diff --git 
a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts index 85cc01578951a..583953096b26b 100644 --- a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts +++ b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts @@ -192,7 +192,7 @@ export class DatabricksDriver extends JDBCDriver { let showSparkProtocolWarn = false; let url: string = conf?.url || - getEnv('databrickUrl', { dataSource }) || + getEnv('databricksUrl', { dataSource }) || getEnv('jdbcUrl', { dataSource }); if (url.indexOf('jdbc:spark://') !== -1) { showSparkProtocolWarn = true; @@ -211,7 +211,7 @@ export class DatabricksDriver extends JDBCDriver { UID: uid, PWD: conf?.token || - getEnv('databrickToken', { dataSource }) || + getEnv('databricksToken', { dataSource }) || pwd, UserAgentEntry: 'CubeDev_Cube', }, From 7c6342c65eac12358d9777e5956d18d37ea9de42 Mon Sep 17 00:00:00 2001 From: Konstantin Burkalev Date: Tue, 3 Jun 2025 16:19:58 +0300 Subject: [PATCH 3/3] prepare CI --- .github/workflows/drivers-tests.yml | 2 ++ .../cubejs-testing-drivers/fixtures/databricks-jdbc.json | 6 +++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/drivers-tests.yml b/.github/workflows/drivers-tests.yml index b323c47961c0a..56ccaacaad269 100644 --- a/.github/workflows/drivers-tests.yml +++ b/.github/workflows/drivers-tests.yml @@ -355,6 +355,8 @@ jobs: DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_TOKEN: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_TOKEN }} DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_KEY: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_KEY }} DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET }} + DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_ID: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_ID }} + DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_SECRET: ${{ 
secrets.DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_SECRET }} # Redshift DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_HOST: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_HOST }} diff --git a/packages/cubejs-testing-drivers/fixtures/databricks-jdbc.json b/packages/cubejs-testing-drivers/fixtures/databricks-jdbc.json index fb19c793fe6c2..0dc7bd8106dfe 100644 --- a/packages/cubejs-testing-drivers/fixtures/databricks-jdbc.json +++ b/packages/cubejs-testing-drivers/fixtures/databricks-jdbc.json @@ -7,7 +7,11 @@ "CUBEJS_DB_EXPORT_BUCKET": "s3://databricks-drivers-tests-preaggs", "CUBEJS_DB_EXPORT_BUCKET_AWS_KEY": "${DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_KEY}", "CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET": "${DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET}", - "CUBEJS_DB_EXPORT_BUCKET_AWS_REGION": "us-east-1" + "CUBEJS_DB_EXPORT_BUCKET_AWS_REGION": "us-east-1", + "Cannot_leave_comments_in_json": "Use OAuth machine-to-machine (M2M) authentication here for testing it too", + "CUBEJS_DB_DATABRICKS_TOKEN": "", + "CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_ID": "${DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_ID}", + "CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_SECRET": "${DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_SECRET}" } } },