Skip to content

Commit f337e46

Browse files
authored
Added annotations for metrics (#324)
* Added annotations for recording the runtime of DatabricksClient and DatabricksMetadataClient commands
1 parent 0112ca9 commit f337e46

17 files changed

+335
-287
lines changed

src/main/java/com/databricks/jdbc/client/DatabricksClient.java

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,31 @@
11
package com.databricks.jdbc.client;
22

33
import com.databricks.jdbc.client.sqlexec.ExternalLink;
4+
import com.databricks.jdbc.commons.CommandName;
45
import com.databricks.jdbc.core.*;
56
import com.databricks.jdbc.core.types.ComputeResource;
7+
import com.databricks.jdbc.driver.IDatabricksConnectionContext;
8+
import com.databricks.jdbc.telemetry.annotation.DatabricksMetricsTimedClass;
9+
import com.databricks.jdbc.telemetry.annotation.DatabricksMetricsTimedMethod;
610
import java.sql.SQLException;
711
import java.util.Collection;
812
import java.util.Map;
913

14+
@DatabricksMetricsTimedClass(
15+
methods = {
16+
@DatabricksMetricsTimedMethod(
17+
methodName = "createSession",
18+
metricName = CommandName.CREATE_SESSION),
19+
@DatabricksMetricsTimedMethod(
20+
methodName = "deleteSession",
21+
metricName = CommandName.DELETE_SESSION),
22+
@DatabricksMetricsTimedMethod(
23+
methodName = "executeStatement",
24+
metricName = CommandName.EXECUTE_STATEMENT),
25+
@DatabricksMetricsTimedMethod(
26+
methodName = "getResultChunks",
27+
metricName = CommandName.GET_RESULT_CHUNKS)
28+
})
1029
/** Interface for Databricks client which abstracts the integration with Databricks server. */
1130
public interface DatabricksClient {
1231

@@ -77,4 +96,6 @@ DatabricksResultSet executeStatement(
7796
*/
7897
Collection<ExternalLink> getResultChunks(String statementId, long chunkIndex)
7998
throws DatabricksSQLException;
99+
100+
IDatabricksConnectionContext getConnectionContext();
80101
}

src/main/java/com/databricks/jdbc/client/DatabricksMetadataClient.java

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,36 @@
11
package com.databricks.jdbc.client;
22

3+
import com.databricks.jdbc.commons.CommandName;
34
import com.databricks.jdbc.core.DatabricksResultSet;
45
import com.databricks.jdbc.core.IDatabricksSession;
6+
import com.databricks.jdbc.telemetry.annotation.DatabricksMetricsTimedClass;
7+
import com.databricks.jdbc.telemetry.annotation.DatabricksMetricsTimedMethod;
58
import java.sql.SQLException;
69

10+
@DatabricksMetricsTimedClass(
11+
methods = {
12+
@DatabricksMetricsTimedMethod(
13+
methodName = "listCatalogs",
14+
metricName = CommandName.LIST_CATALOGS),
15+
@DatabricksMetricsTimedMethod(
16+
methodName = "listSchemas",
17+
metricName = CommandName.LIST_SCHEMAS),
18+
@DatabricksMetricsTimedMethod(
19+
methodName = "listTables",
20+
metricName = CommandName.LIST_TABLES),
21+
@DatabricksMetricsTimedMethod(
22+
methodName = "listTableTypes",
23+
metricName = CommandName.LIST_TABLE_TYPES),
24+
@DatabricksMetricsTimedMethod(
25+
methodName = "listColumns",
26+
metricName = CommandName.LIST_COLUMNS),
27+
@DatabricksMetricsTimedMethod(
28+
methodName = "listFunctions",
29+
metricName = CommandName.LIST_FUNCTIONS),
30+
@DatabricksMetricsTimedMethod(
31+
methodName = "listPrimaryKeys",
32+
metricName = CommandName.LIST_PRIMARY_KEYS)
33+
})
734
public interface DatabricksMetadataClient {
835

936
/** Returns information about types supported by Databricks server */

src/main/java/com/databricks/jdbc/client/impl/sdk/DatabricksNewMetadataSdkClient.java

Lines changed: 13 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -9,16 +9,12 @@
99
import com.databricks.jdbc.client.impl.helper.CommandName;
1010
import com.databricks.jdbc.client.impl.helper.MetadataResultSetBuilder;
1111
import com.databricks.jdbc.commons.LogLevel;
12-
import com.databricks.jdbc.commons.MetricsList;
1312
import com.databricks.jdbc.commons.util.LoggingUtil;
14-
import com.databricks.jdbc.core.*;
1513
import com.databricks.jdbc.core.DatabricksResultSet;
1614
import com.databricks.jdbc.core.IDatabricksSession;
1715
import com.databricks.jdbc.core.ImmutableSqlParameter;
18-
import com.databricks.jdbc.driver.IDatabricksConnectionContext;
1916
import java.sql.ResultSet;
2017
import java.sql.SQLException;
21-
import java.util.*;
2218
import java.util.HashMap;
2319
import java.util.Optional;
2420

@@ -43,38 +39,22 @@ public DatabricksResultSet listTypeInfo(IDatabricksSession session) {
4339

4440
@Override
4541
public DatabricksResultSet listCatalogs(IDatabricksSession session) throws SQLException {
46-
long startTime = System.currentTimeMillis();
4742
CommandBuilder commandBuilder = new CommandBuilder(session);
4843
String SQL = commandBuilder.getSQLString(CommandName.LIST_CATALOGS);
4944
LoggingUtil.log(LogLevel.DEBUG, String.format("SQL command to fetch catalogs: {%s}", SQL));
50-
DatabricksResultSet resultSet =
51-
MetadataResultSetBuilder.getCatalogsResult(
52-
getResultSet(SQL, session, StatementType.METADATA));
53-
IDatabricksConnectionContext connectionContext = session.getConnectionContext();
54-
connectionContext
55-
.getMetricsExporter()
56-
.record(
57-
MetricsList.LIST_CATALOGS_METADATA_SEA.name(), System.currentTimeMillis() - startTime);
58-
return resultSet;
45+
return MetadataResultSetBuilder.getCatalogsResult(
46+
getResultSet(SQL, session, StatementType.METADATA));
5947
}
6048

6149
@Override
6250
public DatabricksResultSet listSchemas(
6351
IDatabricksSession session, String catalog, String schemaNamePattern) throws SQLException {
64-
long startTime = System.currentTimeMillis();
6552
CommandBuilder commandBuilder =
6653
new CommandBuilder(catalog, session).setSchemaPattern(schemaNamePattern);
6754
String SQL = commandBuilder.getSQLString(CommandName.LIST_SCHEMAS);
6855
LoggingUtil.log(LogLevel.DEBUG, String.format("SQL command to fetch schemas: {%s}", SQL));
69-
DatabricksResultSet resultSet =
70-
MetadataResultSetBuilder.getSchemasResult(
71-
getResultSet(SQL, session, StatementType.METADATA), catalog);
72-
IDatabricksConnectionContext connectionContext = session.getConnectionContext();
73-
connectionContext
74-
.getMetricsExporter()
75-
.record(
76-
MetricsList.LIST_SCHEMAS_METADATA_SEA.name(), System.currentTimeMillis() - startTime);
77-
return resultSet;
56+
return MetadataResultSetBuilder.getSchemasResult(
57+
getResultSet(SQL, session, StatementType.METADATA), catalog);
7858
}
7959

8060
@Override
@@ -89,36 +69,19 @@ public DatabricksResultSet listTables(
8969
Optional.ofNullable(tableTypes)
9070
.filter(types -> types.length > 0)
9171
.orElse(DEFAULT_TABLE_TYPES);
92-
long startTime = System.currentTimeMillis();
9372
CommandBuilder commandBuilder =
9473
new CommandBuilder(catalog, session)
9574
.setSchemaPattern(schemaNamePattern)
9675
.setTablePattern(tableNamePattern);
9776
String SQL = commandBuilder.getSQLString(CommandName.LIST_TABLES);
98-
DatabricksResultSet resultSet =
99-
MetadataResultSetBuilder.getTablesResult(
100-
getResultSet(SQL, session, StatementType.METADATA), tableTypes);
101-
IDatabricksConnectionContext connectionContext = session.getConnectionContext();
102-
connectionContext
103-
.getMetricsExporter()
104-
.record(
105-
MetricsList.LIST_TABLES_METADATA_SEA.name(), System.currentTimeMillis() - startTime);
106-
return resultSet;
77+
return MetadataResultSetBuilder.getTablesResult(
78+
getResultSet(SQL, session, StatementType.METADATA), tableTypes);
10779
}
10880

10981
@Override
11082
public DatabricksResultSet listTableTypes(IDatabricksSession session) throws SQLException {
111-
11283
LoggingUtil.log(LogLevel.DEBUG, "Returning list of table types.");
113-
long startTime = System.currentTimeMillis();
114-
DatabricksResultSet resultSet = MetadataResultSetBuilder.getTableTypesResult();
115-
IDatabricksConnectionContext connectionContext = session.getConnectionContext();
116-
connectionContext
117-
.getMetricsExporter()
118-
.record(
119-
MetricsList.LIST_TABLE_TYPES_METADATA_SEA.name(),
120-
System.currentTimeMillis() - startTime);
121-
return resultSet;
84+
return MetadataResultSetBuilder.getTableTypesResult();
12285
}
12386

12487
@Override
@@ -129,21 +92,14 @@ public DatabricksResultSet listColumns(
12992
String tableNamePattern,
13093
String columnNamePattern)
13194
throws SQLException {
132-
long startTime = System.currentTimeMillis();
13395
CommandBuilder commandBuilder =
13496
new CommandBuilder(catalog, session)
13597
.setSchemaPattern(schemaNamePattern)
13698
.setTablePattern(tableNamePattern)
13799
.setColumnPattern(columnNamePattern);
138100
String SQL = commandBuilder.getSQLString(CommandName.LIST_COLUMNS);
139-
DatabricksResultSet resultSet =
140-
MetadataResultSetBuilder.getColumnsResult(getResultSet(SQL, session, StatementType.QUERY));
141-
IDatabricksConnectionContext connectionContext = session.getConnectionContext();
142-
connectionContext
143-
.getMetricsExporter()
144-
.record(
145-
MetricsList.LIST_COLUMNS_METADATA_SEA.name(), System.currentTimeMillis() - startTime);
146-
return resultSet;
101+
return MetadataResultSetBuilder.getColumnsResult(
102+
getResultSet(SQL, session, StatementType.QUERY));
147103
}
148104

149105
@Override
@@ -153,42 +109,25 @@ public DatabricksResultSet listFunctions(
153109
String schemaNamePattern,
154110
String functionNamePattern)
155111
throws SQLException {
156-
long startTime = System.currentTimeMillis();
157112
CommandBuilder commandBuilder =
158113
new CommandBuilder(catalog, session)
159114
.setSchemaPattern(schemaNamePattern)
160115
.setFunctionPattern(functionNamePattern);
161116
String SQL = commandBuilder.getSQLString(CommandName.LIST_FUNCTIONS);
162117
LoggingUtil.log(LogLevel.DEBUG, String.format("SQL command to fetch functions: {%s}", SQL));
163-
DatabricksResultSet resultSet =
164-
MetadataResultSetBuilder.getFunctionsResult(
165-
getResultSet(SQL, session, StatementType.QUERY), catalog);
166-
IDatabricksConnectionContext connectionContext = session.getConnectionContext();
167-
connectionContext
168-
.getMetricsExporter()
169-
.record(
170-
MetricsList.LIST_FUNCTIONS_METADATA_SEA.name(), System.currentTimeMillis() - startTime);
171-
return resultSet;
118+
return MetadataResultSetBuilder.getFunctionsResult(
119+
getResultSet(SQL, session, StatementType.QUERY), catalog);
172120
}
173121

174122
@Override
175123
public DatabricksResultSet listPrimaryKeys(
176124
IDatabricksSession session, String catalog, String schema, String table) throws SQLException {
177-
long startTime = System.currentTimeMillis();
178125
CommandBuilder commandBuilder =
179126
new CommandBuilder(catalog, session).setSchema(schema).setTable(table);
180127
String SQL = commandBuilder.getSQLString(CommandName.LIST_PRIMARY_KEYS);
181128
LoggingUtil.log(LogLevel.DEBUG, String.format("SQL command to fetch primary keys: {%s}", SQL));
182-
DatabricksResultSet resultSet =
183-
MetadataResultSetBuilder.getPrimaryKeysResult(
184-
getResultSet(SQL, session, StatementType.METADATA));
185-
IDatabricksConnectionContext connectionContext = session.getConnectionContext();
186-
connectionContext
187-
.getMetricsExporter()
188-
.record(
189-
MetricsList.LIST_PRIMARY_KEYS_METADATA_SEA.name(),
190-
System.currentTimeMillis() - startTime);
191-
return resultSet;
129+
return MetadataResultSetBuilder.getPrimaryKeysResult(
130+
getResultSet(SQL, session, StatementType.METADATA));
192131
}
193132

194133
private ResultSet getResultSet(

src/main/java/com/databricks/jdbc/client/impl/sdk/DatabricksSdkClient.java

Lines changed: 21 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,6 @@
1616
import com.databricks.jdbc.client.sqlexec.GetStatementResponse;
1717
import com.databricks.jdbc.client.sqlexec.ResultData;
1818
import com.databricks.jdbc.commons.LogLevel;
19-
import com.databricks.jdbc.commons.MetricsList;
2019
import com.databricks.jdbc.commons.util.LoggingUtil;
2120
import com.databricks.jdbc.core.*;
2221
import com.databricks.jdbc.core.types.ComputeResource;
@@ -42,6 +41,11 @@ public class DatabricksSdkClient implements DatabricksClient {
4241
private final DatabricksConfig databricksConfig;
4342
private final WorkspaceClient workspaceClient;
4443

44+
@Override
45+
public IDatabricksConnectionContext getConnectionContext() {
46+
return connectionContext;
47+
}
48+
4549
private static Map<String, String> getHeaders() {
4650
return Map.of(
4751
"Accept", "application/json",
@@ -82,7 +86,6 @@ public ImmutableSessionInfo createSession(
8286
String.format(
8387
"public Session createSession(String warehouseId = {%s}, String catalog = {%s}, String schema = {%s}, Map<String, String> sessionConf = {%s})",
8488
((Warehouse) warehouse).getWarehouseId(), catalog, schema, sessionConf));
85-
long startTime = System.currentTimeMillis();
8689
CreateSessionRequest request =
8790
new CreateSessionRequest().setWarehouseId(((Warehouse) warehouse).getWarehouseId());
8891
if (catalog != null) {
@@ -99,21 +102,15 @@ public ImmutableSessionInfo createSession(
99102
.apiClient()
100103
.POST(SESSION_PATH, request, CreateSessionResponse.class, getHeaders());
101104

102-
ImmutableSessionInfo sessionInfo =
103-
ImmutableSessionInfo.builder()
104-
.computeResource(warehouse)
105-
.sessionId(createSessionResponse.getSessionId())
106-
.build();
107-
connectionContext
108-
.getMetricsExporter()
109-
.record(MetricsList.CREATE_SESSION.name(), System.currentTimeMillis() - startTime);
110-
return sessionInfo;
105+
return ImmutableSessionInfo.builder()
106+
.computeResource(warehouse)
107+
.sessionId(createSessionResponse.getSessionId())
108+
.build();
111109
}
112110

113111
@Override
114112
public void deleteSession(IDatabricksSession session, ComputeResource warehouse)
115113
throws DatabricksSQLException {
116-
long startTime = System.currentTimeMillis();
117114
LoggingUtil.log(
118115
LogLevel.DEBUG,
119116
String.format(
@@ -125,9 +122,6 @@ public void deleteSession(IDatabricksSession session, ComputeResource warehouse)
125122
String path = String.format(DELETE_SESSION_PATH_WITH_ID, request.getSessionId());
126123
Map<String, String> headers = new HashMap<>();
127124
workspaceClient.apiClient().DELETE(path, request, Void.class, headers);
128-
connectionContext
129-
.getMetricsExporter()
130-
.record(MetricsList.DELETE_SESSION.name(), System.currentTimeMillis() - startTime);
131125
}
132126

133127
@Override
@@ -139,7 +133,6 @@ public DatabricksResultSet executeStatement(
139133
IDatabricksSession session,
140134
IDatabricksStatement parentStatement)
141135
throws SQLException {
142-
long startTime = System.currentTimeMillis();
143136
LoggingUtil.log(
144137
LogLevel.DEBUG,
145138
String.format(
@@ -192,19 +185,14 @@ public DatabricksResultSet executeStatement(
192185
if (responseState != StatementState.SUCCEEDED) {
193186
handleFailedExecution(response, statementId, sql);
194187
}
195-
DatabricksResultSet resultSet =
196-
new DatabricksResultSet(
197-
response.getStatus(),
198-
statementId,
199-
response.getResult(),
200-
response.getManifest(),
201-
statementType,
202-
session,
203-
parentStatement);
204-
connectionContext
205-
.getMetricsExporter()
206-
.record(MetricsList.EXECUTE_STATEMENT.name(), System.currentTimeMillis() - startTime);
207-
return resultSet;
188+
return new DatabricksResultSet(
189+
response.getStatus(),
190+
statementId,
191+
response.getResult(),
192+
response.getManifest(),
193+
statementType,
194+
session,
195+
parentStatement);
208196
}
209197

210198
private boolean useCloudFetchForResult(StatementType statementType) {
@@ -239,19 +227,13 @@ public Collection<ExternalLink> getResultChunks(String statementId, long chunkIn
239227
String.format(
240228
"public Optional<ExternalLink> getResultChunk(String statementId = {%s}, long chunkIndex = {%s})",
241229
statementId, chunkIndex));
242-
long startTime = System.currentTimeMillis();
243230
GetStatementResultChunkNRequest request =
244231
new GetStatementResultChunkNRequest().setStatementId(statementId).setChunkIndex(chunkIndex);
245232
String path = String.format(RESULT_CHUNK_PATH, statementId, chunkIndex);
246-
Collection<ExternalLink> chunkLinks =
247-
workspaceClient
248-
.apiClient()
249-
.GET(path, request, ResultData.class, getHeaders())
250-
.getExternalLinks();
251-
connectionContext
252-
.getMetricsExporter()
253-
.record(MetricsList.GET_RESULT_CHUNKS.name(), System.currentTimeMillis() - startTime);
254-
return chunkLinks;
233+
return workspaceClient
234+
.apiClient()
235+
.GET(path, request, ResultData.class, getHeaders())
236+
.getExternalLinks();
255237
}
256238

257239
private ExecuteStatementRequest getRequest(
@@ -262,7 +244,6 @@ private ExecuteStatementRequest getRequest(
262244
Map<Integer, ImmutableSqlParameter> parameters,
263245
IDatabricksStatement parentStatement)
264246
throws SQLException {
265-
long startTime = System.currentTimeMillis();
266247
Format format = useCloudFetchForResult(statementType) ? Format.ARROW_STREAM : Format.JSON_ARRAY;
267248
Disposition disposition =
268249
useCloudFetchForResult(statementType) ? Disposition.EXTERNAL_LINKS : Disposition.INLINE;
@@ -284,9 +265,6 @@ private ExecuteStatementRequest getRequest(
284265
if (maxRows != DEFAULT_ROW_LIMIT) {
285266
request.setRowLimit(maxRows);
286267
}
287-
connectionContext
288-
.getMetricsExporter()
289-
.record(MetricsList.GET_REQUEST.name(), System.currentTimeMillis() - startTime);
290268
return request;
291269
}
292270

0 commit comments

Comments
 (0)