@@ -18,7 +18,10 @@ import {
 } from '@cubejs-backend/base-driver';
 import { JDBCDriver, JDBCDriverConfiguration, } from '@cubejs-backend/jdbc-driver';
 import { DatabricksQuery } from './DatabricksQuery';
-import { extractUidFromJdbcUrl, resolveJDBCDriver } from './helpers';
+import {
+  extractAndRemoveUidPwdFromJdbcUrl,
+  resolveJDBCDriver
+} from './helpers';
 
 export type DatabricksDriverConfiguration = JDBCDriverConfiguration &
   {
@@ -180,20 +183,20 @@ export class DatabricksDriver extends JDBCDriver {
       url = url.replace('jdbc:spark://', 'jdbc:databricks://');
     }
 
+    const [uid, pwd, cleanedUrl] = extractAndRemoveUidPwdFromJdbcUrl(url);
+
     const config: DatabricksDriverConfiguration = {
       ...conf,
-      url,
+      url: cleanedUrl,
       dbType: 'databricks',
       drivername: 'com.databricks.client.jdbc.Driver',
       customClassPath: undefined,
       properties: {
-        UID: extractUidFromJdbcUrl(url),
-        // PWD-parameter passed to the connection string has higher priority,
-        // so we can set this one to an empty string to avoid a Java error.
+        UID: uid,
         PWD:
           conf?.token ||
           getEnv('databrickToken', { dataSource }) ||
-          pwd,
+          pwd,
         UserAgentEntry: 'CubeDev_Cube',
       },
       catalog:
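
For context, a minimal sketch of what the renamed helper could look like (hypothetical; the real implementation lives in ./helpers): it extracts the UID and PWD properties from the JDBC URL and returns them alongside the URL with both properties removed, so credentials flow through `properties` rather than the connection string.

// Hypothetical sketch, assuming UID/PWD appear as semicolon-separated JDBC
// URL properties. The 'token' fallback for UID is an assumption (Databricks
// personal access tokens are typically sent with UID=token).
export function extractAndRemoveUidPwdFromJdbcUrl(
  url: string,
): [uid: string, pwd: string, cleanedUrl: string] {
  const uid = url.match(/UID=([^;]*)/i)?.[1] || 'token';
  const pwd = url.match(/PWD=([^;]*)/i)?.[1] || '';
  // Drop both properties so they are not passed twice (URL and properties).
  const cleanedUrl = url.replace(/;(UID|PWD)=[^;]*/gi, '');
  return [uid, pwd, cleanedUrl];
}
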
@@ -520,7 +523,7 @@ export class DatabricksDriver extends JDBCDriver {
    * Returns table columns types.
    */
   public override async tableColumnTypes(table: string): Promise<TableColumn[]> {
-    let tableFullName = '';
+    let tableFullName: string;
     const tableArray = table.split('.');
 
     if (tableArray.length === 3) {
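
The last hunk swaps the empty-string initializer for a bare typed declaration. With no initializer, TypeScript's definite-assignment analysis requires every branch to assign `tableFullName` before it is read, so a missed case becomes a compile error rather than a silent query against an empty table name. An illustrative sketch (not the driver's actual code):

function fullName(table: string): string {
  let tableFullName: string;
  const parts = table.split('.');

  if (parts.length === 3) {
    tableFullName = table;
  } else {
    // Removing this branch now fails to compile:
    // "Variable 'tableFullName' is used before being assigned."
    tableFullName = `default.${table}`;
  }

  return tableFullName;
}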