Skip to content

Commit e4ea6fd

Browse files
Merge pull request #3 from db-caioishizaka/0_50_support
0 50 support
2 parents dee1d7a + 69d042e commit e4ea6fd

File tree

8 files changed

+64
-33
lines changed

8 files changed

+64
-33
lines changed

Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ COPY . /driver
55
RUN apt-get update && apt-get install -y \
66
curl
77

8-
ARG METABASE_VERSION="v0.49.3"
8+
ARG METABASE_VERSION="v0.50.4"
99

1010
RUN curl -Lo - https://github.com/metabase/metabase/archive/refs/tags/${METABASE_VERSION}.tar.gz | tar -xz && mv metabase-* /metabase
1111

Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11

22
ROOT_DIR:=$(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
3-
METABASE_VERSION=v0.49.3
3+
METABASE_VERSION=v0.50.4
44

55
build:
66
@echo "build"

README.md

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
## Installation
44

5-
Beginning with Metabase 0.32, drivers must be stored in a `plugins` directory in the same directory where `metabase.jar` is, or you can specify the directory by setting the environment variable `MB_PLUGINS_DIR`. There are a few options to get up and running with a custom driver.
5+
Drivers must be stored in a `plugins` directory in the same directory where `metabase.jar` is, or you can specify the directory by setting the environment variable `MB_PLUGINS_DIR`. There are a few options to get up and running with a custom driver.
66

77
You can find the jar file on the [release page](https://github.com/schumannc/databricks-sql-driver/releases) or you can build it locally.
88
## Build
@@ -22,9 +22,15 @@ Once the Metabase startup completes, you can access your Metabase at `localhost:
2222

2323
## Usage
2424

25-
Copy `host`, `http-path`, `personal-access-token`, `Catalog` and `database` to metabase form.
25+
Copy `host`, `http-path`, `personal-access-token`, and `Catalog` from your Databricks SQL Warehouse page (Connection Details tab) into the Metabase form.
26+
27+
Since 0.1.0, you don't need to specify the schema/database. You can now choose between bringing in all schemas, only a few named schemas, or all schemas except a selected few.
28+
29+
## Compatibility
30+
31+
The latest version (0.1.0) is compatible with Metabase 0.50 onwards; it is not backward compatible with 0.49. For earlier Metabase versions, check previous releases.
2632

2733

2834
![](screenshots/databricks-sql.png)
29-
![](screenshots/metabase-form-2.png)
35+
![](screenshots/metabase-form-0_1_0.png)
3036

deps.edn

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
["src" "resources"]
33

44
:deps
5-
{com.databricks/databricks-jdbc {:mvn/version "2.6.36" :exclusion [log4j/log4j]}}
5+
{com.databricks/databricks-jdbc {:mvn/version "2.6.38" :exclusion [log4j/log4j]}}
66

77
;; the stuff below is only for hacking on the driver locally and is not needed if you follow the instructions in the
88
;; README and create a `:local/root` dep for the driver and launch the REPL from the Metabase project rather than

resources/metabase-plugin.yaml

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
info:
22
name: Metabase Databricks SQL Driver
3-
version: 1.0.0-SNAPSHOT-0.0.1
3+
version: 0.1.0
44
description: Allows Metabase to connect to Databricks SQL warehouse
55
dependencies:
66
- plugin: Metabase Spark SQL Driver
@@ -20,11 +20,10 @@ driver:
2020
- name: db
2121
display-name: Catalog
2222
default: hive_metastore
23-
required: true
24-
- name: schema
25-
display-name: Database
26-
default: default
27-
required: true
23+
required: true
24+
- name: schema-filters
25+
type: schema-filters
26+
display-name: Schemas
2827
- advanced-options-start
2928
- default-advanced-options
3029
init:
115 KB
Loading

screenshots/metabase-form-2.png

-116 KB
Binary file not shown.

src/metabase/driver/databricks_sql.clj

Lines changed: 47 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,13 @@
1111
[metabase.driver.sql.query-processor :as sql.qp]
1212
[metabase.driver.sql.util :as sql.u]
1313
[metabase.driver.sql.util.unprepare :as unprepare]
14-
[metabase.mbql.util :as mbql.u]
14+
[metabase.driver.sync :as driver.s]
15+
[metabase.legacy-mbql.util :as mbql.u]
1516
[metabase.query-processor.util :as qp.util]
1617
[java-time :as t]
1718
[metabase.util :as u]
18-
[metabase.util.date-2 :as u.date])
19+
[metabase.util.date-2 :as u.date]
20+
[metabase.util.log :as log])
1921
(:import [java.sql Connection ResultSet Types]
2022
[java.time LocalDate OffsetDateTime ZonedDateTime]))
2123

@@ -67,16 +69,31 @@
6769
#"map" :type/Dictionary
6870
#".*" :type/*))
6971

70-
;; workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata
72+
(def ^:private get-tables-sql
73+
[(str/join
74+
"\n"
75+
["select"
76+
" table_name as name,"
77+
" table_schema as schema,"
78+
" table_type as type,"
79+
" comment as description"
80+
" from information_schema.tables"
81+
" where table_schema NOT IN ('information_schema')"])])
82+
83+
(defn- describe-database-tables
84+
[database]
85+
(let [[inclusion-patterns
86+
exclusion-patterns] (driver.s/db-details->schema-filter-patterns database)
87+
syncable? (fn [schema]
88+
(driver.s/include-schema? inclusion-patterns exclusion-patterns schema))]
89+
(eduction
90+
(comp (filter (comp syncable? :schema))
91+
(map #(dissoc % :type)))
92+
(sql-jdbc.execute/reducible-query database get-tables-sql))))
93+
7194
(defmethod driver/describe-database :databricks-sql
72-
[_ database]
73-
{:tables
74-
(with-open [conn (jdbc/get-connection (sql-jdbc.conn/db->pooled-connection-spec database))]
75-
(set
76-
(for [{:keys [database tablename], table-namespace :namespace} (jdbc/query {:connection conn} ["show tables"])]
77-
{:name tablename
78-
:schema (or (not-empty database)
79-
(not-empty table-namespace))})))})
95+
[_driver database]
96+
{:tables (into #{} (describe-database-tables database))})
8097

8198
;; Hive describe table result has commented rows to distinguish partitions
8299
(defn- valid-describe-table-row? [{:keys [col_name data_type]}]
@@ -160,19 +177,28 @@
160177
;; the current HiveConnection doesn't support .createStatement
161178
(defmethod sql-jdbc.execute/statement-supported? :databricks-sql [_] false)
162179

163-
(doseq [feature [:basic-aggregations
164-
:binning
165-
:expression-aggregations
166-
:expressions
167-
:native-parameters
168-
:nested-queries
169-
:standard-deviation-aggregations]]
170-
(defmethod driver/supports? [:databricks-sql feature] [_ _] true))
180+
181+
(doseq [[feature supported?] {:basic-aggregations true
182+
:binning true
183+
:expression-aggregations true
184+
:expressions true
185+
:full-join true
186+
:right-join true
187+
:left-join true
188+
:inner-join true
189+
:foreign-keys false
190+
:native-parameters true
191+
:nested-queries true
192+
:standard-deviation-aggregations true
193+
:metadata/key-constraints false
194+
:test/jvm-timezone-setting false
195+
:window-functions/cumulative false}]
196+
(defmethod driver/database-supports? [:databricks-sql feature] [_driver _feature _db] supported?))
171197

172198
;; only define an implementation for `:foreign-keys` if none exists already. In test extensions we define an alternate
173199
;; implementation, and we don't want to stomp over that if it was loaded already
174-
(when-not (get (methods driver/supports?) [:databricks-sql :foreign-keys])
175-
(defmethod driver/supports? [:databricks-sql :foreign-keys] [_ _] true))
200+
(when-not (get (methods driver/database-supports?) [:databricks-sql :foreign-keys])
201+
(defmethod driver/database-supports? [:databricks-sql :foreign-keys] [_ _] true))
176202

177203
(defmethod sql.qp/quote-style :databricks-sql [_] :mysql)
178204

0 commit comments

Comments
 (0)