import pandas as pd
import streamlit as st

from databricks import sql
from databricks.sdk import WorkspaceClient
from databricks.sdk.core import Config
56
67
# Page title with a divider underneath.
st.header("Tables", divider=True)
1415
# SDK config -- picks up DATABRICKS_HOST etc. from the environment when run locally.
cfg = Config()

# Workspace client used for warehouse/catalog/schema/table discovery.
w = WorkspaceClient()

warehouses = w.warehouses.list()

# Warehouse display name -> HTTP path, used later to open SQL connections.
warehouse_paths = {wh.name: wh.odbc_params.path for wh in warehouses}

catalogs = w.catalogs.list()
25+
1726
1827@st .cache_resource
1928def get_connection (http_path ):
@@ -24,12 +33,23 @@ def get_connection(http_path):
2433 )
2534
2635
def read_table(table_name: str, conn) -> pd.DataFrame:
    """Read every row of ``table_name`` through ``conn`` into a pandas DataFrame.

    Args:
        table_name: Fully qualified ``catalog.schema.table`` identifier.
        conn: An open Databricks SQL connection (as returned by ``get_connection``).

    Returns:
        The table contents as a pandas DataFrame (via Arrow for speed).

    NOTE(review): the table name is interpolated directly into the SQL text;
    only pass trusted, validated identifiers (here they come from the SDK's
    catalog/schema/table listings, not free-form user input).
    """
    with conn.cursor() as cursor:
        cursor.execute(f"SELECT * FROM {table_name}")
        # fetchall_arrow() pulls the result as an Arrow table; to_pandas() converts it.
        return cursor.fetchall_arrow().to_pandas()
3141
3242
def get_schema_names(catalog_name):
    """Return the names of all schemas inside ``catalog_name``."""
    return [s.name for s in w.schemas.list(catalog_name=catalog_name)]
46+
47+
def get_table_names(catalog_name, schema_name):
    """Return the names of all tables in ``catalog_name``.``schema_name``."""
    listed = w.tables.list(catalog_name=catalog_name, schema_name=schema_name)
    return [t.name for t in listed]
51+
52+
3353def insert_overwrite_table (table_name : str , df : pd .DataFrame , conn ):
3454 progress = st .empty ()
3555 with conn .cursor () as cursor :
@@ -45,26 +65,40 @@ def insert_overwrite_table(table_name: str, df: pd.DataFrame, conn):
tab_a, tab_b, tab_c = st.tabs(["**Try it**", "**Code snippet**", "**Requirements**"])

with tab_a:
    # Each selectbox offers "" as a "nothing chosen yet" sentinel; an empty
    # string is falsy, so plain truthiness checks below cover both "not yet
    # rendered" and "left on the blank entry".
    http_path_input = st.selectbox(
        "Select a SQL warehouse:", [""] + list(warehouse_paths.keys())
    )

    catalog_name = st.selectbox(
        "Select a catalog:", [""] + [catalog.name for catalog in catalogs]
    )

    # Initialize so the guards below never hit an unbound name, regardless of
    # which widgets have been rendered on this rerun.
    schema_name = ""
    table_name = ""

    if catalog_name:
        schema_name = st.selectbox(
            "Select a schema:", [""] + get_schema_names(catalog_name)
        )

    if catalog_name and schema_name:
        table_name = st.selectbox(
            "Select a table:", [""] + get_table_names(catalog_name, schema_name)
        )

    # Proceed only once a warehouse and a fully qualified table are chosen.
    if http_path_input and catalog_name and schema_name and table_name:
        in_table_name = f"{catalog_name}.{schema_name}.{table_name}"

        conn = get_connection(warehouse_paths[http_path_input])
        original_df = read_table(in_table_name, conn)
        edited_df = st.data_editor(original_df, num_rows="dynamic", hide_index=True)

        # Offer to save only when the editor contents actually differ from
        # the table: rows unique to either frame survive drop_duplicates.
        df_diff = pd.concat([original_df, edited_df]).drop_duplicates(keep=False)
        if not df_diff.empty:
            if st.button("Save changes"):
                insert_overwrite_table(in_table_name, edited_df, conn)
69103
70104with tab_b :
@@ -75,6 +109,7 @@ def insert_overwrite_table(table_name: str, df: pd.DataFrame, conn):
75109 from databricks import sql
76110 from databricks.sdk.core import Config
77111
112+
78113 cfg = Config() # Set the DATABRICKS_HOST environment variable when running locally
79114
80115
0 commit comments