|
11 | 11 | [metabase.driver.sql.query-processor :as sql.qp] |
12 | 12 | [metabase.driver.sql.util :as sql.u] |
13 | 13 | [metabase.driver.sql.util.unprepare :as unprepare] |
14 | | - [metabase.mbql.util :as mbql.u] |
| 14 | + [metabase.driver.sync :as driver.s] |
| 15 | + [metabase.legacy-mbql.util :as mbql.u] |
15 | 16 | [metabase.query-processor.util :as qp.util] |
16 | 17 | [java-time :as t] |
17 | 18 | [metabase.util :as u] |
18 | | - [metabase.util.date-2 :as u.date]) |
| 19 | + [metabase.util.date-2 :as u.date] |
| 20 | + [metabase.util.log :as log]) |
19 | 21 | (:import [java.sql Connection ResultSet Types] |
20 | 22 | [java.time LocalDate OffsetDateTime ZonedDateTime])) |
21 | 23 |
|
|
67 | 69 | #"map" :type/Dictionary |
68 | 70 | #".*" :type/*)) |
69 | 71 |
|
(def ^:private get-tables-sql
  "Honey-free SQL (in the `[sql & params]` vector shape expected by
  `sql-jdbc.execute/reducible-query`) that lists every table visible in
  `information_schema.tables`, excluding the metadata schema itself.
  Replaces the old `show tables` workaround for SPARK-9686 (Thrift server
  returning incomplete JDBC metadata)."
  [(str "select\n"
        " table_name as name,\n"
        " table_schema as schema,\n"
        " table_type as type,\n"
        " comment as description\n"
        " from information_schema.tables\n"
        " where table_schema NOT IN ('information_schema')")])
| 82 | + |
(defn- describe-database-tables
  "Return a reducible (eduction) of table descriptions for `database`,
  produced by running `get-tables-sql` against `information_schema`.
  Rows whose schema is excluded by the database's schema-filter settings
  are dropped, and the `:type` column is removed from each row."
  [database]
  (let [[inclusion-patterns exclusion-patterns]
        (driver.s/db-details->schema-filter-patterns database)
        syncable-schema? (fn [schema-name]
                           (driver.s/include-schema? inclusion-patterns
                                                     exclusion-patterns
                                                     schema-name))
        xform            (comp (filter #(syncable-schema? (:schema %)))
                               (map #(dissoc % :type)))]
    (eduction xform (sql-jdbc.execute/reducible-query database get-tables-sql))))
| 93 | + |
;; Table listing is delegated to `describe-database-tables`, which reads
;; `information_schema` and already applies the configured schema filters;
;; `set` realizes the eduction into the set the sync code expects.
(defmethod driver/describe-database :databricks-sql
  [_driver database]
  {:tables (set (describe-database-tables database))})
80 | 97 |
|
81 | 98 | ;; Hive describe table result has commented rows to distinguish partitions |
82 | 99 | (defn- valid-describe-table-row? [{:keys [col_name data_type]}] |
|
;; The HiveConnection used by this driver does not implement `.createStatement`,
;; so tell the shared SQL-JDBC execute code to use prepared statements only.
(defmethod sql-jdbc.execute/statement-supported? :databricks-sql
  [_driver]
  false)
162 | 179 |
|
;; Feature flags for :databricks-sql.
;; NOTE: :foreign-keys is intentionally NOT in this map. Registering it
;; unconditionally here would stomp over the alternate implementation that the
;; test extensions install (see the guarded `when-not` registration later in
;; this namespace), making that guard dead code. Instead it is registered below
;; with the same "only if none exists" guard.
(doseq [[feature supported?] {:basic-aggregations              true
                              :binning                         true
                              :expression-aggregations         true
                              :expressions                     true
                              :full-join                       true
                              :right-join                      true
                              :left-join                       true
                              :inner-join                      true
                              :native-parameters               true
                              :nested-queries                  true
                              :standard-deviation-aggregations true
                              :metadata/key-constraints        false
                              :test/jvm-timezone-setting       false
                              :window-functions/cumulative     false}]
  (defmethod driver/database-supports? [:databricks-sql feature] [_driver _feature _db] supported?))

;; :foreign-keys defaults to unsupported, but only when no implementation
;; (e.g. one loaded earlier by test extensions) has been registered already.
(when-not (get (methods driver/database-supports?) [:databricks-sql :foreign-keys])
  (defmethod driver/database-supports? [:databricks-sql :foreign-keys] [_driver _feature _db] false))
171 | 197 |
|
;; only define an implementation for `:foreign-keys` if none exists already. In test extensions we define an alternate
;; implementation, and we don't want to stomp over that if it was loaded already.
;; FIX: `driver/database-supports?` is dispatched with three arguments
;; (driver, feature, database) — cf. the `[_driver _feature _db]` arity used by
;; the feature-flag `doseq` in this namespace — so the original two-arg body
;; `[_ _]` would throw an ArityException if this method were ever invoked.
;; NOTE(review): this guard returns `true` while the feature map declares
;; `:foreign-keys false`; since the map's unconditional defmethod runs first,
;; this branch never fires in production — confirm which value is intended.
(when-not (get (methods driver/database-supports?) [:databricks-sql :foreign-keys])
  (defmethod driver/database-supports? [:databricks-sql :foreign-keys] [_ _ _] true))
176 | 202 |
|
;; Quote identifiers the MySQL way (backticks) when generating SQL for this
;; driver, as the shared SQL query processor expects for Spark-family dialects.
(defmethod sql.qp/quote-style :databricks-sql
  [_driver]
  :mysql)
178 | 204 |
|
|
0 commit comments