@@ -103,6 +103,11 @@ export type DatabricksDriverConfiguration = JDBCDriverConfiguration &
    * Azure service principal client secret
    */
   azureClientSecret?: string,
+
+  /**
+   * GCS credentials JSON content
+   */
+  gcsCredentials?: string,
 };
 
 type ShowTableRow = {
@@ -209,7 +214,7 @@ export class DatabricksDriver extends JDBCDriver {
       // common export bucket config
       bucketType:
         conf?.bucketType ||
-        getEnv('dbExportBucketType', { supported: ['s3', 'azure'], dataSource }),
+        getEnv('dbExportBucketType', { supported: ['s3', 'azure', 'gcs'], dataSource }),
       exportBucket:
         conf?.exportBucket ||
         getEnv('dbExportBucket', { dataSource }),
@@ -246,6 +251,10 @@ export class DatabricksDriver extends JDBCDriver {
       azureClientSecret:
         conf?.azureClientSecret ||
         getEnv('dbExportBucketAzureClientSecret', { dataSource }),
+      // GCS credentials
+      gcsCredentials:
+        conf?.gcsCredentials ||
+        getEnv('dbExportGCSCredentials', { dataSource }),
     };
     if (config.readOnly === undefined) {
       // we can set readonly to true if there is no bucket config provided
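A minimal sketch, not from this PR, of how the new GCS option could be passed to the driver, based only on the config fields read above. The import path, bucket value, and key file are assumptions for illustration; the same fields can alternatively come from the corresponding dbExport* settings resolved by getEnv.

// Hypothetical wiring sketch; package path, bucket name, and key file are assumptions.
import { readFileSync } from 'fs';
import { DatabricksDriver } from '@cubejs-backend/databricks-jdbc-driver';

const driver = new DatabricksDriver({
  // JDBC connection options omitted; they are unrelated to the export bucket.
  bucketType: 'gcs',                                               // newly supported value
  exportBucket: 'my-export-bucket',                                // assumed bucket name/format
  gcsCredentials: readFileSync('./service-account.json', 'utf8'),  // JSON content, per the new field
});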
@@ -643,16 +652,19 @@ export class DatabricksDriver extends JDBCDriver {
    * export bucket data.
    */
   public async unload(tableName: string, options: UnloadOptions) {
-    if (!['azure', 's3'].includes(this.config.bucketType as string)) {
+    if (!['azure', 's3', 'gcs'].includes(this.config.bucketType as string)) {
       throw new Error(`Unsupported export bucket type: ${
         this.config.bucketType
       }`);
     }
+    // Construct a fully qualified table name with proper quoting:
+    // 1. Quotes are needed to handle special characters in identifiers, e.g. `my-table`
+    // 2. The table name may include a schema (e.g. 'schema.table'), so we split and quote each part, e.g. `schema`.`table`
     const tableFullName = `${
       this.config.catalog
-        ? `${this.config.catalog}.`
+        ? `${this.quoteIdentifier(this.config.catalog)}.`
         : ''
-    }${tableName}`;
+    }${tableName.split('.').map(part => this.quoteIdentifier(part)).join('.')}`;
     const types = options.query
       ? await this.unloadWithSql(
         tableFullName,
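To make the quoting above concrete, here is a stand-alone illustration with hypothetical values, assuming quoteIdentifier() wraps names in backticks as the comment's `schema`.`table` example implies:

// Illustration only; quote() is a stand-in for this.quoteIdentifier().
const quote = (id: string) => `\`${id}\``;
const catalog = 'main';
const tableName = 'prod_pre_aggregations.orders_rollup';
const tableFullName = `${quote(catalog)}.${tableName.split('.').map(quote).join('.')}`;
// tableFullName === '`main`.`prod_pre_aggregations`.`orders_rollup`'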
@@ -733,6 +745,12 @@ export class DatabricksDriver extends JDBCDriver {
         url.host,
         objectSearchPrefix,
       );
+    } else if (this.config.bucketType === 'gcs') {
+      return this.extractFilesFromGCS(
+        { credentials: this.config.gcsCredentials },
+        url.host,
+        objectSearchPrefix,
+      );
     } else {
       throw new Error(`Unsupported export bucket type: ${
         this.config.bucketType
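extractFilesFromGCS itself is provided by the shared base driver rather than by this PR. The following is only a rough sketch of the general approach such a helper typically takes, assuming the @google-cloud/storage client; it is not the actual implementation.

// Rough sketch only; the real helper lives in the shared base driver.
import { Storage } from '@google-cloud/storage';

async function listUnloadedGcsFiles(
  credentialsJson: string,   // raw service-account JSON, as stored in gcsCredentials
  bucketName: string,        // url.host in the driver code above
  prefix: string,            // objectSearchPrefix in the driver code above
): Promise<string[]> {
  const storage = new Storage({ credentials: JSON.parse(credentialsJson) });
  const [files] = await storage.bucket(bucketName).getFiles({ prefix });
  // Return short-lived signed URLs so the exported files can be downloaded.
  return Promise.all(
    files.map(async (file) => {
      const [url] = await file.getSignedUrl({
        action: 'read',
        expires: Date.now() + 60 * 60 * 1000, // 1 hour
      });
      return url;
    }),
  );
}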
@@ -769,6 +787,9 @@ export class DatabricksDriver extends JDBCDriver {
    *
    * `fs.s3a.access.key <aws-access-key>`
    * `fs.s3a.secret.key <aws-secret-key>`
+   * For Google Cloud Storage, configure storage credentials and create an external location that grants access to the bucket
+   * (https://docs.databricks.com/gcp/en/connect/unity-catalog/cloud-storage/storage-credentials,
+   * https://docs.databricks.com/gcp/en/connect/unity-catalog/cloud-storage/external-locations).
    */
   private async createExternalTableFromSql(tableFullName: string, sql: string, params: unknown[], columns: ColumnInfo[]) {
     let select = sql;