Commit 957cb31

Remove dead telemetry code (#540)
* Remove telemetry
* Remove todos
1 parent 7667617 commit 957cb31

25 files changed: +6, -854 lines

src/main/java/com/databricks/client/jdbc/Driver.java

Lines changed: 0 additions & 4 deletions

@@ -54,8 +54,6 @@ public Connection connect(String url, Properties info) throws DatabricksSQLExcep
     try {
       connection.open();
       isConnectionOpen = true;
-      // TODO (PECO-1957): Export properties asynchronously
-      // DeviceInfoLogUtil.exportDeviceProperties(connection.getSession());
       resolveMetadataClient(connection, connectionContext);
       return connection;
     } catch (Exception e) {
@@ -74,8 +72,6 @@ public Connection connect(String url, Properties info) throws DatabricksSQLExcep
         errorMessage += e.getMessage();
       }
 
-      MetricsUtil.exportErrorWithoutAuth(
-          ErrorTypes.COMMUNICATION_FAILURE, null, ErrorCodes.COMMUNICATION_FAILURE);
       throw new DatabricksSQLException(
           errorMessage,
           rootCause,
src/main/java/com/databricks/jdbc/api/IDatabricksConnectionContext.java

Lines changed: 0 additions & 2 deletions

@@ -157,8 +157,6 @@ public static AuthMech parseAuthMech(String authMech) {
 
   boolean supportManyParameters();
 
-  boolean enableTelemetry();
-
   String getConnectionURL();
 
   boolean checkCertificateRevocation();

src/main/java/com/databricks/jdbc/api/IDatabricksSession.java

Lines changed: 0 additions & 4 deletions

@@ -6,7 +6,6 @@
 import com.databricks.jdbc.dbclient.IDatabricksClient;
 import com.databricks.jdbc.dbclient.IDatabricksMetadataClient;
 import com.databricks.jdbc.exception.DatabricksSQLException;
-import com.databricks.jdbc.telemetry.DatabricksMetrics;
 import java.util.Map;
 import javax.annotation.Nullable;
 
@@ -84,7 +83,4 @@ public interface IDatabricksSession {
   IDatabricksConnectionContext getConnectionContext();
 
   void setEmptyMetadataClient();
-
-  /** Returns the metrics exporter for the session */
-  DatabricksMetrics getMetricsExporter();
 }

src/main/java/com/databricks/jdbc/api/impl/DatabricksConnectionContext.java

Lines changed: 0 additions & 5 deletions

@@ -479,11 +479,6 @@ public boolean supportManyParameters() {
     return getParameter(DatabricksJdbcUrlParams.SUPPORT_MANY_PARAMETERS).equals("1");
   }
 
-  @Override
-  public boolean enableTelemetry() {
-    return Objects.equals(getParameter(DatabricksJdbcUrlParams.ENABLE_TELEMETRY), "1");
-  }
-
   @Override
   public String getConnectionURL() {
     return connectionURL;
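
The removed enableTelemetry() override shows the flag was driven by a connection-URL parameter (DatabricksJdbcUrlParams.ENABLE_TELEMETRY, compared against "1" via Objects.equals). A minimal sketch of how such a flag would have been toggled from the JDBC URL; the "EnableTelemetry" key spelling and the host are assumptions for illustration, not taken from this diff:

import java.sql.Connection;
import java.sql.DriverManager;

public class ConnectSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical URL; the "EnableTelemetry" key spelling is assumed.
    // After this commit the flag is no longer consulted by the driver.
    String url = "jdbc:databricks://<host>:443/default;EnableTelemetry=1";
    try (Connection conn = DriverManager.getConnection(url)) {
      System.out.println("connected: " + !conn.isClosed());
    }
  }
}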

src/main/java/com/databricks/jdbc/api/impl/DatabricksSession.java

Lines changed: 0 additions & 12 deletions

@@ -15,8 +15,6 @@
 import com.databricks.jdbc.exception.DatabricksSQLException;
 import com.databricks.jdbc.log.JdbcLogger;
 import com.databricks.jdbc.log.JdbcLoggerFactory;
-import com.databricks.jdbc.telemetry.DatabricksMetrics;
-import com.databricks.jdbc.telemetry.annotation.DatabricksMetricsTimedProcessor;
 import com.databricks.sdk.support.ToStringer;
 import com.google.common.annotations.VisibleForTesting;
 import java.util.HashMap;
@@ -44,7 +42,6 @@ public class DatabricksSession implements IDatabricksSession {
   private final Map<String, String> clientInfoProperties;
   private final CompressionType compressionType;
   private final IDatabricksConnectionContext connectionContext;
-  private final DatabricksMetrics metricsExporter;
 
   /**
    * Creates an instance of Databricks session for given connection context
@@ -68,9 +65,6 @@ public DatabricksSession(IDatabricksConnectionContext connectionContext)
     this.clientInfoProperties = new HashMap<>();
     this.compressionType = connectionContext.getCompressionType();
     this.connectionContext = connectionContext;
-    this.metricsExporter = new DatabricksMetrics(connectionContext);
-    this.databricksClient =
-        DatabricksMetricsTimedProcessor.createProxy(this.databricksClient, metricsExporter);
   }
 
   /** Constructor method to be used for mocking in a test case. */
@@ -89,7 +83,6 @@ public DatabricksSession(
     this.sessionConfigs = connectionContext.getSessionConfigs();
     this.clientInfoProperties = new HashMap<>();
     this.compressionType = connectionContext.getCompressionType();
-    this.metricsExporter = new DatabricksMetrics(connectionContext);
     this.connectionContext = connectionContext;
   }
 
@@ -147,7 +140,6 @@ public void close() throws DatabricksSQLException {
       databricksClient.deleteSession(this, computeResource);
       this.sessionInfo = null;
      this.isSessionOpen = false;
-      this.getMetricsExporter().close();
      }
    }
  }
@@ -244,8 +236,4 @@ public IDatabricksConnectionContext getConnectionContext() {
   public void setEmptyMetadataClient() {
     databricksMetadataClient = new DatabricksEmptyMetadataClient();
   }
-
-  public DatabricksMetrics getMetricsExporter() {
-    return this.metricsExporter;
-  }
 }
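
The removed constructor lines rewrapped databricksClient via DatabricksMetricsTimedProcessor.createProxy(...), which suggests a dynamic-proxy timing wrapper around the client interface. The processor's internals are not part of this diff; below is a generic sketch of that pattern using java.lang.reflect.Proxy, with every name other than the java.lang.reflect API being illustrative:

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;

final class TimingProxy {
  // Wraps any interface implementation so each call's latency is measured.
  @SuppressWarnings("unchecked")
  static <T> T wrap(T delegate, Class<T> iface) {
    InvocationHandler handler =
        (proxy, method, args) -> {
          long start = System.nanoTime();
          try {
            return method.invoke(delegate, args);
          } finally {
            long elapsedMs = (System.nanoTime() - start) / 1_000_000;
            // A real exporter would record elapsedMs keyed by method name.
            System.out.printf("%s took %d ms%n", method.getName(), elapsedMs);
          }
        };
    return (T) Proxy.newProxyInstance(iface.getClassLoader(), new Class<?>[] {iface}, handler);
  }
}

Usage would then mirror the removed call, e.g. databricksClient = TimingProxy.wrap(databricksClient, IDatabricksClient.class) (hypothetical, shown only to make the removed wiring concrete).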

src/main/java/com/databricks/jdbc/api/impl/DatabricksStatement.java

Lines changed: 0 additions & 96 deletions

@@ -9,7 +9,6 @@
 import com.databricks.jdbc.api.IDatabricksStatement;
 import com.databricks.jdbc.api.callback.IDatabricksStatementHandle;
 import com.databricks.jdbc.common.ErrorCodes;
-import com.databricks.jdbc.common.ErrorTypes;
 import com.databricks.jdbc.common.StatementType;
 import com.databricks.jdbc.common.util.*;
 import com.databricks.jdbc.dbclient.IDatabricksClient;
@@ -61,11 +60,6 @@ public ResultSet executeQuery(String sql) throws SQLException {
               + sql
               + ". However, query "
               + "execution was successful.";
-      MetricsUtil.exportError(
-          connection.getSession(),
-          ErrorTypes.EXECUTE_STATEMENT,
-          statementId,
-          ErrorCodes.RESULT_SET_ERROR);
       throw new DatabricksSQLException(errorMessage, ErrorCodes.RESULT_SET_ERROR);
     }
     return rs;
@@ -107,23 +101,13 @@ public void close(boolean removeFromSession) throws DatabricksSQLException {
   @Override
   public int getMaxFieldSize() throws SQLException {
     LOGGER.debug("public int getMaxFieldSize()");
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.MAX_FIELD_SIZE_EXCEEDED);
     throw new DatabricksSQLFeatureNotSupportedException(
         "Not implemented in DatabricksStatement - getMaxFieldSize()");
   }
 
   @Override
   public void setMaxFieldSize(int max) throws SQLException {
     LOGGER.debug(String.format("public void setMaxFieldSize(int max = {%s})", max));
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.MAX_FIELD_SIZE_EXCEEDED);
     throw new DatabricksSQLFeatureNotSupportedException(
         "Not implemented in DatabricksStatement - setMaxFieldSize(int max)");
   }
@@ -192,11 +176,6 @@ public void clearWarnings() {
   @Override
   public void setCursorName(String name) throws SQLException {
     LOGGER.debug(String.format("public void setCursorName(String name = {%s})", name));
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.CURSOR_NAME_NOT_FOUND);
     throw new DatabricksSQLFeatureNotSupportedException(
         "Not implemented in DatabricksStatement - setCursorName(String name)");
   }
@@ -226,11 +205,6 @@ public int getUpdateCount() throws SQLException {
   @Override
   public boolean getMoreResults() throws SQLException {
     LOGGER.debug("public boolean getMoreResults()");
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.MORE_RESULTS_UNSUPPORTED);
     throw new DatabricksSQLFeatureNotSupportedException(
         "Not implemented in DatabricksStatement - getMoreResults()");
   }
@@ -240,11 +214,6 @@ public void setFetchDirection(int direction) throws SQLException {
     LOGGER.debug(String.format("public void setFetchDirection(int direction = {%s})", direction));
     checkIfClosed();
     if (direction != ResultSet.FETCH_FORWARD) {
-      MetricsUtil.exportError(
-          connection.getSession(),
-          ErrorTypes.FEATURE_NOT_SUPPORTED,
-          statementId,
-          ErrorCodes.UNSUPPORTED_FETCH_FORWARD);
       throw new DatabricksSQLFeatureNotSupportedException("Not supported: ResultSet.FetchForward");
     }
   }
@@ -297,11 +266,6 @@ public int getResultSetType() throws SQLException {
   public void addBatch(String sql) throws SQLException {
     LOGGER.debug(String.format("public void addBatch(String sql = {%s})", sql));
     checkIfClosed();
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.BATCH_OPERATION_UNSUPPORTED);
     throw new DatabricksSQLFeatureNotSupportedException(
         "Method not supported: addBatch(String sql)");
   }
@@ -310,23 +274,13 @@ public void addBatch(String sql) throws SQLException {
   public void clearBatch() throws SQLException {
     LOGGER.debug("public void clearBatch()");
     checkIfClosed();
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.BATCH_OPERATION_UNSUPPORTED);
     throw new DatabricksSQLFeatureNotSupportedException("Method not supported: clearBatch()");
   }
 
   @Override
   public int[] executeBatch() throws SQLException {
     LOGGER.debug("public int[] executeBatch()");
     checkIfClosed();
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.BATCH_OPERATION_UNSUPPORTED);
     throw new DatabricksSQLFeatureNotSupportedException("Method not supported: executeBatch()");
   }
 
@@ -339,11 +293,6 @@ public Connection getConnection() throws SQLException {
   @Override
   public boolean getMoreResults(int current) throws SQLException {
     LOGGER.debug(String.format("public boolean getMoreResults(int current = {%s})", current));
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.MORE_RESULTS_UNSUPPORTED);
     throw new DatabricksSQLFeatureNotSupportedException(
         "Not implemented in DatabricksStatement - getMoreResults(int current)");
   }
@@ -361,11 +310,6 @@ public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException
     if (autoGeneratedKeys == Statement.NO_GENERATED_KEYS) {
       return executeUpdate(sql);
     } else {
-      MetricsUtil.exportError(
-          connection.getSession(),
-          ErrorTypes.FEATURE_NOT_SUPPORTED,
-          statementId,
-          ErrorCodes.EXECUTE_METHOD_UNSUPPORTED);
       throw new DatabricksSQLFeatureNotSupportedException(
           "Method not supported: executeUpdate(String sql, int autoGeneratedKeys)");
     }
@@ -374,11 +318,6 @@ public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException
   @Override
   public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
     checkIfClosed();
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.EXECUTE_METHOD_UNSUPPORTED);
     throw new DatabricksSQLFeatureNotSupportedException(
         "Method not supported: executeUpdate(String sql, int[] columnIndexes)");
   }
@@ -387,11 +326,6 @@ public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
   public int executeUpdate(String sql, String[] columnNames) throws SQLException {
     LOGGER.debug("public int executeUpdate(String sql, String[] columnNames)");
     checkIfClosed();
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.EXECUTE_METHOD_UNSUPPORTED);
     throw new DatabricksSQLFeatureNotSupportedException(
         "Method not supported: executeUpdate(String sql, String[] columnNames)");
   }
@@ -402,11 +336,6 @@ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
     if (autoGeneratedKeys == Statement.NO_GENERATED_KEYS) {
       return execute(sql);
     } else {
-      MetricsUtil.exportError(
-          connection.getSession(),
-          ErrorTypes.FEATURE_NOT_SUPPORTED,
-          statementId,
-          ErrorCodes.EXECUTE_METHOD_UNSUPPORTED);
       throw new DatabricksSQLFeatureNotSupportedException(
           "Method not supported: execute(String sql, int autoGeneratedKeys)");
     }
@@ -415,23 +344,13 @@ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
   @Override
   public boolean execute(String sql, int[] columnIndexes) throws SQLException {
     checkIfClosed();
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.EXECUTE_METHOD_UNSUPPORTED);
     throw new DatabricksSQLFeatureNotSupportedException(
         "Method not supported: execute(String sql, int[] columnIndexes)");
   }
 
   @Override
   public boolean execute(String sql, String[] columnNames) throws SQLException {
     checkIfClosed();
-    MetricsUtil.exportError(
-        connection.getSession(),
-        ErrorTypes.FEATURE_NOT_SUPPORTED,
-        statementId,
-        ErrorCodes.EXECUTE_METHOD_UNSUPPORTED);
     throw new DatabricksSQLFeatureNotSupportedException(
         "Method not supported: execute(String sql, String[] columnNames)");
   }
@@ -453,11 +372,6 @@ public void setPoolable(boolean poolable) throws SQLException {
     LOGGER.debug(String.format("public void setPoolable(boolean poolable = {%s})", poolable));
     checkIfClosed();
     if (poolable) {
-      MetricsUtil.exportError(
-          connection.getSession(),
-          ErrorTypes.FEATURE_NOT_SUPPORTED,
-          statementId,
-          ErrorCodes.POOLABLE_METHOD_UNSUPPORTED);
       throw new DatabricksSQLFeatureNotSupportedException(
           "Method not supported: setPoolable(boolean poolable)");
     }
@@ -538,11 +452,6 @@ DatabricksResultSet executeInternal(
           stackTraceMessage, statementId);
       LOGGER.error(timeoutErrorMessage);
       futureResultSet.cancel(true); // Cancel execution run
-      MetricsUtil.exportError(
-          this.connection.getSession(),
-          ErrorTypes.TIMEOUT_ERROR,
-          statementId,
-          ErrorCodes.STATEMENT_EXECUTION_TIMEOUT);
       throw new DatabricksTimeoutException(timeoutErrorMessage, e);
     } catch (InterruptedException | ExecutionException e) {
       Throwable cause = e;
@@ -557,11 +466,6 @@ DatabricksResultSet executeInternal(
           String.format(
               "Error occurred during statement execution: %s. Error : %s", sql, e.getMessage());
       LOGGER.error(e, errMsg);
-      MetricsUtil.exportError(
-          this.connection.getSession(),
-          ErrorTypes.EXECUTE_STATEMENT,
-          "",
-          ErrorCodes.EXECUTE_STATEMENT_FAILED);
       throw new DatabricksSQLException(errMsg, e, "", ErrorCodes.EXECUTE_STATEMENT_FAILED);
     }
     LOGGER.debug("Result retrieved successfully" + resultSet.toString());

src/main/java/com/databricks/jdbc/api/impl/arrow/ArrowResultChunk.java

Lines changed: 3 additions & 3 deletions

@@ -398,11 +398,11 @@ private void logAllocatorStats(String event) {
     long headRoom = rootAllocator.getHeadroom();
     long initReservation = rootAllocator.getInitReservation();
 
-    String telemetryLog =
+    String allocatorStatsLog =
         String.format(
-            "Chunk telemetry - Event: %s, Chunk Index: %s, Allocated Memory: %s, Peak Memory: %s, Headroom: %s, Init Reservation: %s",
+            "Chunk allocator stats Log - Event: %s, Chunk Index: %s, Allocated Memory: %s, Peak Memory: %s, Headroom: %s, Init Reservation: %s",
            event, chunkIndex, allocatedMemory, peakMemory, headRoom, initReservation);
-    LOGGER.debug(telemetryLog);
+    LOGGER.debug(allocatorStatsLog);
   }
 
   public static class Builder {
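
With the renamed variable and format string, the debug line emitted by logAllocatorStats renders along these lines (all field values illustrative):

Chunk allocator stats Log - Event: chunkReleased, Chunk Index: 3, Allocated Memory: 1048576, Peak Memory: 2097152, Headroom: 8388608, Init Reservation: 0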

src/main/java/com/databricks/jdbc/api/impl/arrow/ChunkDownloader.java

Lines changed: 3 additions & 8 deletions

@@ -2,9 +2,6 @@
 
 import com.databricks.jdbc.api.IDatabricksSession;
 import com.databricks.jdbc.common.CompressionType;
-import com.databricks.jdbc.common.ErrorCodes;
-import com.databricks.jdbc.common.ErrorTypes;
-import com.databricks.jdbc.common.util.MetricsUtil;
 import com.databricks.jdbc.dbclient.IDatabricksHttpClient;
 import com.databricks.jdbc.dbclient.impl.http.DatabricksHttpClient;
 import com.databricks.jdbc.exception.DatabricksParsingException;
@@ -137,8 +134,6 @@ ArrowResultChunk getChunk() throws DatabricksSQLException {
         chunk.wait();
       }
       if (chunk.getStatus() != ArrowResultChunk.ChunkStatus.DOWNLOAD_SUCCEEDED) {
-        MetricsUtil.exportError(
-            session, ErrorTypes.CHUNK_DOWNLOAD, statementId, ErrorCodes.CHUNK_DOWNLOAD_ERROR);
         throw new DatabricksSQLException(chunk.getErrorMessage());
       }
     } catch (InterruptedException e) {
@@ -238,11 +233,11 @@ private static ConcurrentHashMap<Long, ArrowResultChunk> initializeChunksMap(
       return chunkIndexMap;
     }
     for (TSparkArrowResultLink resultLink : resultData.getResultLinks()) {
-      String telemetryLog =
+      String chunkInformationLog =
          String.format(
-              "Manifest telemetry - Row Offset: %s, Row Count: %s, Expiry Time: %s",
+              "Chunk information log - Row Offset: %s, Row Count: %s, Expiry Time: %s",
              resultLink.getStartRowOffset(), resultLink.getRowCount(), resultLink.getExpiryTime());
-      LOGGER.debug(telemetryLog);
+      LOGGER.debug(chunkInformationLog);
       chunkIndexMap.put(
          chunkIndex,
          ArrowResultChunk.builder()
