
Commit 6d2f97a

feat(cubejs-databricks-jdbc-driver): read-only mode (#8440)
* Add .editorconfig for DX convenience
* feat(cubejs-databricks-jdbc-driver): set read-only mode if no exportBucket options provided
* chore(cubejs-databricks-jdbc-driver): polish driver interfaces/implementation
* Update docs
* docs(jdbc-driver): update driver installation on macOS

Co-authored-by: Igor Lukanin <[email protected]>
1 parent 2b3bdcd commit 6d2f97a
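
The core behavioral change: when `readOnly` is not set explicitly, the driver now infers it from whether export bucket options are present (see the `DatabricksDriver.ts` diff below). A minimal standalone TypeScript sketch of that defaulting rule; the `DriverConfigSketch` type and `resolveReadOnly` helper are illustrative names, not part of the driver:

```ts
// Illustrative sketch only: the type and function are hypothetical, but the
// defaulting rule mirrors the constructor change in DatabricksDriver.ts below.
interface DriverConfigSketch {
  readOnly?: boolean;
  exportBucket?: string;
}

function resolveReadOnly(config: DriverConfigSketch): boolean {
  if (config.readOnly !== undefined) {
    // An explicit setting always wins.
    return config.readOnly;
  }
  // No export bucket configured: the driver can operate in read-only mode.
  return !config.exportBucket;
}

console.log(resolveReadOnly({}));                            // true
console.log(resolveReadOnly({ exportBucket: 'my-bucket' })); // false
console.log(resolveReadOnly({ readOnly: false }));           // false
```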

5 files changed (+56 -24 lines)

.editorconfig

Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+# editorconfig.org
+root = true
+
+[*]
+indent_style = space
+indent_size = 2
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true

docs/pages/product/configuration/data-sources/databricks-jdbc.mdx

Lines changed: 1 addition & 1 deletion
@@ -84,7 +84,7 @@ here][ref-caching-using-preaggs-build-strats].
 
 | Feature       | Works with read-only mode? | Is default? |
 | ------------- | :------------------------: | :---------: |
-| Simple        |             ❌             |      ✅      |
+| Simple        |             ✅             |      ✅      |
 | Export Bucket |             ❌             |      ❌      |
 
 By default, Databricks JDBC uses a [simple][self-preaggs-simple] strategy to

packages/cubejs-base-driver/src/driver.interface.ts

Lines changed: 3 additions & 2 deletions
@@ -230,6 +230,7 @@ export interface DriverInterface {
   queryColumnTypes: (sql: string, params: unknown[]) => Promise<{ name: any; type: string; }[]>;
   //
   getSchemas: () => Promise<QuerySchemasResult[]>;
+  tablesSchema: () => Promise<any>;
   getTablesForSpecificSchemas: (schemas: QuerySchemasResult[]) => Promise<QueryTablesResult[]>;
   getColumnsForSpecificTables: (tables: QueryTablesResult[]) => Promise<QueryColumnsResult[]>;
   // eslint-disable-next-line camelcase
@@ -246,7 +247,7 @@
    * queried fields types.
    */
   stream?: (table: string, values: unknown[], options: StreamOptions) => Promise<StreamTableData>;
-
+
   /**
    * Returns to the Cubestore an object with links to unloaded to an
    * export bucket data.
@@ -258,7 +259,7 @@
   * Determines whether export bucket feature is configured or not.
   */
  isUnloadSupported?: (options: UnloadOptions) => Promise<boolean>;
-
+
  // Current timestamp, defaults to new Date().getTime()
  nowTimestamp(): number;
  // Shutdown the driver
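
For context on the new `tablesSchema` member: the Databricks implementation in the next file types its result as `Record<string, Record<string, object>>`, i.e. a map of schema name to table name to column metadata. A hedged sketch of a value satisfying that signature; the schema, table, and column entries are made-up examples, not output from a real driver:

```ts
// Hypothetical value matching the Record<string, Record<string, object>> shape
// used by DatabricksDriver.tablesSchema() below; all names are illustrative.
const exampleTablesSchema: Record<string, Record<string, object>> = {
  default: {
    orders: [
      { name: 'id', type: 'bigint' },
      { name: 'status', type: 'string' },
      { name: 'created_at', type: 'timestamp' },
    ],
  },
};

console.log(Object.keys(exampleTablesSchema));         // ['default']
console.log(Object.keys(exampleTablesSchema.default)); // ['orders']
```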

packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts

Lines changed: 20 additions & 14 deletions
@@ -71,7 +71,7 @@ export type DatabricksDriverConfiguration = JDBCDriverConfiguration &
    * Export bucket AWS account region.
    */
   awsRegion?: string,
-
+
   /**
    * Export bucket Azure account key.
    */
@@ -119,7 +119,7 @@ export class DatabricksDriver extends JDBCDriver {
   private showSparkProtocolWarn: boolean;
 
   /**
-   * Read-only mode flag.
+   * Driver Configuration.
    */
   protected readonly config: DatabricksDriverConfiguration;
 
@@ -221,6 +221,11 @@ export class DatabricksDriver extends JDBCDriver {
       exportBucketCsvEscapeSymbol:
         getEnv('dbExportBucketCsvEscapeSymbol', { dataSource }),
     };
+    if (config.readOnly === undefined) {
+      // we can set readonly to true if there is no bucket config provided
+      config.readOnly = !config.exportBucket;
+    }
+
     super(config);
     this.config = config;
     this.showSparkProtocolWarn = showSparkProtocolWarn;
@@ -289,10 +294,11 @@
     values: unknown[],
   ): Promise<R[]> {
     if (this.config.catalog) {
+      const preAggSchemaName = this.getPreAggrSchemaName();
       return super.query(
         query.replace(
-          new RegExp(`(?<=\\s)${this.getPreaggsSchemaName()}\\.(?=[^\\s]+)`, 'g'),
-          `${this.config.catalog}.${this.getPreaggsSchemaName()}.`
+          new RegExp(`(?<=\\s)${preAggSchemaName}\\.(?=[^\\s]+)`, 'g'),
+          `${this.config.catalog}.${preAggSchemaName}.`
         ),
         values,
       );
@@ -304,7 +310,7 @@
   /**
    * Returns pre-aggregation schema name.
    */
-  public getPreaggsSchemaName(): string {
+  protected getPreAggrSchemaName(): string {
     const schema = getEnv('preAggregationsSchema');
     if (schema) {
       return schema;
@@ -327,7 +333,7 @@
     return super.dropTable(tableFullName, options);
   }
 
-  public showDeprecations() {
+  private showDeprecations() {
     if (this.config.url) {
       const result = this.config.url
         .split(';')
@@ -387,7 +393,7 @@
   /**
    * Returns tables meta data object.
    */
-  public async tablesSchema(): Promise<Record<string, Record<string, object>>> {
+  public override async tablesSchema(): Promise<Record<string, Record<string, object>>> {
     const tables = await this.getTables();
 
     const metadata: Record<string, Record<string, object>> = {};
@@ -407,7 +413,7 @@
   /**
    * Returns list of accessible tables.
    */
-  public async getTables(): Promise<ShowTableRow[]> {
+  private async getTables(): Promise<ShowTableRow[]> {
     if (this.config.database) {
       return <any> this.query<ShowTableRow>(
         `SHOW TABLES IN ${
@@ -492,7 +498,7 @@
   /**
    * Returns table columns types.
    */
-  public async tableColumnTypes(table: string): Promise<{ name: any; type: string; }[]> {
+  public override async tableColumnTypes(table: string): Promise<{ name: any; type: string; }[]> {
     let tableFullName = '';
     const tableArray = table.split('.');
 
@@ -567,7 +573,7 @@
   /**
    * Returns schema full name.
    */
-  public getSchemaFullName(schema: string): string {
+  private getSchemaFullName(schema: string): string {
     if (this.config?.catalog) {
       return `${
         this.quoteIdentifier(this.config.catalog)
@@ -582,14 +588,14 @@
   /**
    * Returns quoted string.
    */
-  public quoteIdentifier(identifier: string): string {
+  protected quoteIdentifier(identifier: string): string {
     return `\`${identifier}\``;
   }
 
   /**
    * Returns the JS type by the Databricks type.
    */
-  public toGenericType(columnType: string): string {
+  protected toGenericType(columnType: string): string {
     return DatabricksToGenericType[columnType.toLowerCase()] || super.toGenericType(columnType);
   }
 
@@ -639,7 +645,7 @@
     const types = await this.queryColumnTypes(sql, params);
 
     await this.createExternalTableFromSql(tableFullName, sql, params, types);
-
+
     return types;
   }
 
@@ -650,7 +656,7 @@
     const types = await this.tableColumnTypes(tableFullName);
 
     await this.createExternalTableFromTable(tableFullName, types);
-
+
     return types;
   }
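
One detail from the `query()` hunk above worth spelling out: when a catalog is configured, the driver rewrites references to the pre-aggregation schema in the generated SQL so they are prefixed with the catalog. A standalone illustration of that string rewrite; the schema name, catalog, and SQL text are example values:

```ts
// Standalone illustration of the rewrite performed in DatabricksDriver.query()
// when config.catalog is set; names and SQL are examples.
const preAggSchemaName = 'prod_pre_aggregations';
const catalog = 'main';

const sql = `SELECT * FROM ${preAggSchemaName}.orders_rollup WHERE status = ?`;

const rewritten = sql.replace(
  // Match `<schema>.` wherever it follows whitespace and precedes a table name.
  new RegExp(`(?<=\\s)${preAggSchemaName}\\.(?=[^\\s]+)`, 'g'),
  `${catalog}.${preAggSchemaName}.`
);

console.log(rewritten);
// SELECT * FROM main.prod_pre_aggregations.orders_rollup WHERE status = ?
```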

packages/cubejs-jdbc-driver/README.md

Lines changed: 22 additions & 7 deletions
@@ -11,20 +11,37 @@ JDBC driver.
 
 ## Support
 
-This package is **community supported** and should be used at your own risk.
+This package is **community supported** and should be used at your own risk.
 
-While the Cube Dev team is happy to review and accept future community contributions, we don't have active plans for further development. This includes bug fixes unless they affect different parts of Cube.js. **We're looking for maintainers for this package.** If you'd like to become a maintainer, please contact us in Cube.js Slack.
+While the Cube Dev team is happy to review and accept future community contributions, we don't have active plans for
+further development. This includes bug fixes unless they affect different parts of Cube.js. **We're looking for
+maintainers for this package.** If you'd like to become a maintainer, please contact us in Cube.js Slack.
 
 ## Java installation
 
 ### macOS
 
 ```sh
-brew install openjdk@8
-sudo ln -sfn /usr/local/opt/openjdk@8/libexec/openjdk.jdk /Library/Java/JavaVirtualMachines/openjdk-8.jdk
+brew install openjdk
+# At the moment of writing, openjdk 22.0.1 is the latest and proven to work on Intel/M1 Macs
+# Follow the brew-suggested advice at the end of installation:
+# For the system Java wrappers to find this JDK, symlink it with
+sudo ln -sfn /usr/local/opt/openjdk/libexec/openjdk.jdk /Library/Java/JavaVirtualMachines/openjdk.jdk
+
+# Ensure that the newly installed JDK is visible
+/usr/libexec/java_home -V
+# You should see the installed JDK among others, something like this:
+# Matching Java Virtual Machines (3):
+#   22.0.1 (x86_64) "Homebrew" - "OpenJDK 22.0.1" /usr/local/Cellar/openjdk/22.0.1/libexec/openjdk.jdk/Contents/Home
+#   1.8.0_40 (x86_64) "Oracle Corporation" - "Java SE 8" /Library/Java/JavaVirtualMachines/jdk1.8.0_40.jdk/Contents/Home
+
+# Set the JAVA_HOME environment variable before running yarn in the Cube repo
+export JAVA_HOME=`/usr/libexec/java_home -v 22.0.1`
 ```
 
-If this doesn't work, please run commands from `$ brew info openjdk@8`.
+**Note:** It's important to set `JAVA_HOME` prior to running `yarn/npm install` in the Cube repo, as the Java Bridge npm package
+uses it to locate Java and caches it internally. In case you have already run package installation, you have to rebuild
+all native packages or just delete `node_modules` and run `yarn` again.
 
 ### Debian, Ubuntu, etc.
 
@@ -46,8 +63,6 @@ If you have Chocolatey packet manager:
 choco install openjdk
 ```
 
-Or download it from
-
 ## License
 
 Cube.js JDBC Database Driver is [Apache 2.0 licensed](./LICENSE).
