
Commit ce10eb2

Authored by: Zac Davies (zacdav-db)
Moving arrow to Suggests - it's only required if a non-default option is used for the DBSQL connector via reticulate. (#94)
Co-authored-by: Zac Davies <[email protected]>
1 parent 6ae7689 commit ce10eb2
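With this change, arrow only matters for the non-default code paths of the reticulate-backed Databricks SQL connector; installing the connector and using the default tibble return work without it. A hedged setup sketch based on the functions touched in this diff (assumes a configured Databricks host/token and a real warehouse id):

# one-time setup: install the Python databricks-sql-connector into the
# reticulate-managed environment, then create a client against a warehouse
library(brickster)
install_db_sql_connector()
client <- db_sql_client(id = "<warehouse_id>", use_cloud_fetch = TRUE)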

File tree

3 files changed: +69 -36 lines changed


DESCRIPTION

Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
 Package: brickster
 Title: R Toolkit for 'Databricks'
-Version: 0.2.7
+Version: 0.2.8
 Authors@R:
     c(
       person(given = "Zac",
@@ -23,7 +23,6 @@ LazyData: true
 Depends:
     R (>= 4.1.0)
 Imports:
-    arrow,
     base64enc,
     cli,
     curl,
@@ -39,6 +38,7 @@ Imports:
     tibble,
     utils
 Suggests:
+    arrow,
     testthat (>= 3.0.0),
     huxtable,
     htmltools,
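Because a Suggests dependency is not guaranteed to be installed, any code path that needs it has to check at runtime. A minimal sketch of the guard pattern the package switches to, using the rlang helpers that the updated R/sql-connector.R below calls:

# runtime guard for an optional (Suggests) dependency
if (rlang::is_installed("arrow")) {
  message("arrow is available; arrow-backed results can be used")
} else {
  # errors with an informative prompt to install the missing package
  rlang::check_installed("arrow", reason = "to return results as an arrow Table")
}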

NAMESPACE

Lines changed: 0 additions & 1 deletion
@@ -229,7 +229,6 @@ export(spark_python_task)
 export(spark_submit_task)
 export(wait_for_lib_installs)
 import(R6)
-import(arrow)
 import(cli)
 import(httr2)
 import(tibble)

R/sql-connector.R

Lines changed: 67 additions & 33 deletions
@@ -11,8 +11,11 @@
 #'
 #' @examples
 #' \dontrun{install_db_sql_connector()}
-install_db_sql_connector <- function(envname = determine_brickster_venv(),
-                                     method = "auto", ...) {
+install_db_sql_connector <- function(
+  envname = determine_brickster_venv(),
+  method = "auto",
+  ...
+) {
   reticulate::py_install(
     "databricks-sql-connector",
     envname = envname,
@@ -50,22 +53,24 @@ install_db_sql_connector <- function(envname = determine_brickster_venv(),
 #' @inheritParams db_sql_exec_query
 #' @inheritParams auth_params
 #'
-#' @import arrow
 #' @returns [DatabricksSqlClient()]
 #' @examples
 #' \dontrun{
 #' client <- db_sql_client(id = "<warehouse_id>", use_cloud_fetch = TRUE)
 #' }
 #' @export
-db_sql_client <- function(id,
-                          catalog = NULL, schema = NULL,
-                          compute_type = c("warehouse", "cluster"),
-                          use_cloud_fetch = FALSE,
-                          session_configuration = list(),
-                          host = db_host(), token = db_token(),
-                          workspace_id = db_current_workspace_id(),
-                          ...) {
-
+db_sql_client <- function(
+  id,
+  catalog = NULL,
+  schema = NULL,
+  compute_type = c("warehouse", "cluster"),
+  use_cloud_fetch = FALSE,
+  session_configuration = list(),
+  host = db_host(),
+  token = db_token(),
+  workspace_id = db_current_workspace_id(),
+  ...
+) {
   compute_type <- match.arg(compute_type)
   http_path <- generate_http_path(
     id = id,
@@ -83,7 +88,6 @@ db_sql_client <- function(id,
     session_configuration = session_configuration,
     ...
   )
-
 }
 
 #' @title Databricks SQL Connector
@@ -99,7 +103,6 @@ db_sql_client <- function(id,
 DatabricksSqlClient <- R6::R6Class(
   classname = "db_sql_client",
   public = list(
-
     #' @description
     #' Creates a new instance of this [R6][R6::R6Class] class.
     #'
@@ -121,11 +124,16 @@ DatabricksSqlClient <- R6::R6Class(
     #' See [db_sql_client()].
     #' @param ... Parameters passed to [connection method](https://docs.databricks.com/en/dev-tools/python-sql-connector.html#methods)
     #' @return [DatabricksSqlClient].
-    initialize = function(host, token, http_path,
-                          catalog, schema,
-                          use_cloud_fetch, session_configuration,
-                          ...) {
-
+    initialize = function(
+      host,
+      token,
+      http_path,
+      catalog,
+      schema,
+      use_cloud_fetch,
+      session_configuration,
+      ...
+    ) {
       private$connection <- py_db_sql_connector$connect(
         server_hostname = host,
         access_token = token,
@@ -160,9 +168,13 @@ DatabricksSqlClient <- R6::R6Class(
     #' client$columns(catalog_name = "default", table_name = "gold_%")
     #' }
     #' @return [tibble::tibble] or [arrow::Table].
-    columns = function(catalog_name = NULL, schema_name = NULL,
-                       table_name = NULL, column_name = NULL,
-                       as_tibble = TRUE) {
+    columns = function(
+      catalog_name = NULL,
+      schema_name = NULL,
+      table_name = NULL,
+      column_name = NULL,
+      as_tibble = TRUE
+    ) {
       cursor <- private$connection$cursor()
       on.exit(cursor$close())
       cursor$columns(
@@ -209,8 +221,11 @@ DatabricksSqlClient <- R6::R6Class(
     #' client$schemas(catalog_name = "main")
     #' }
     #' @return [tibble::tibble] or [arrow::Table].
-    schemas = function(catalog_name = NULL, schema_name = NULL,
-                       as_tibble = TRUE) {
+    schemas = function(
+      catalog_name = NULL,
+      schema_name = NULL,
+      as_tibble = TRUE
+    ) {
       cursor <- private$connection$cursor()
       on.exit(cursor$close())
       cursor$schemas(
@@ -238,9 +253,13 @@ DatabricksSqlClient <- R6::R6Class(
     #' If `TRUE` (default) will return [tibble::tibble], otherwise returns
     #' [arrow::Table].
     #' @return [tibble::tibble] or [arrow::Table].
-    tables = function(catalog_name = NULL, schema_name = NULL,
-                      table_name = NULL, table_types = NULL,
-                      as_tibble = TRUE) {
+    tables = function(
+      catalog_name = NULL,
+      schema_name = NULL,
+      table_name = NULL,
+      table_types = NULL,
+      as_tibble = TRUE
+    ) {
       cursor <- private$connection$cursor()
       on.exit(cursor$close())
       cursor$tables(
@@ -307,8 +326,11 @@ DatabricksSqlClient <- R6::R6Class(
     #' )
     #'}
     #' @return [tibble::tibble] or [arrow::Table].
-    execute_many = function(operation, seq_of_parameters = NULL,
-                            as_tibble = TRUE) {
+    execute_many = function(
+      operation,
+      seq_of_parameters = NULL,
+      as_tibble = TRUE
+    ) {
       cursor <- private$connection$cursor()
       on.exit(cursor$close())
       cursor$executemany(
@@ -317,7 +339,6 @@ DatabricksSqlClient <- R6::R6Class(
       )
       handle_results(cursor$fetchall_arrow(), as_tibble)
     }
-
   ),
   private = list(
     connection = NULL,
@@ -328,8 +349,11 @@ DatabricksSqlClient <- R6::R6Class(
   )
 )
 
-generate_http_path <- function(id, is_warehouse = TRUE,
-                               workspace_id = db_current_workspace_id()) {
+generate_http_path <- function(
+  id,
+  is_warehouse = TRUE,
+  workspace_id = db_current_workspace_id()
+) {
   if (is_warehouse) {
     paste0("/sql/1.0/warehouses/", id)
   } else {
@@ -338,8 +362,18 @@ generate_http_path <- function(id, is_warehouse = TRUE,
 }
 
 handle_results <- function(x, as_tibble) {
+  # if returning a tibble, collect via arrow when installed, else fall back to pandas
   if (as_tibble) {
-    x <- dplyr::collect(x)
+    if (rlang::is_installed("arrow")) {
+      x <- dplyr::collect(x)
+    } else {
+      x <- tibble::as_tibble(x$to_pandas())
+    }
+  } else {
+    rlang::check_installed(
+      pkg = "arrow",
+      reason = "to return results as an arrow Table"
+    )
   }
   x
 }
