
Commit 078f53f

Fix stack overflow in logs + make logs work in SQuirreL (#289)
* Initial commit
* Make our codebase agnostic of logging technology
* test
* add changes
* Add slf4j
* fix tests
* Add pooled connection fix
* Add more logs
* Add print
* fmt
* merge conflicts
1 parent a2bc25f commit 078f53f


47 files changed (+1688 −1228 lines)

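At its core, the commit swaps direct Log4j 2 calls for a logging facade: each class drops its static Logger field and routes messages through LoggingUtil, so driver code no longer depends on a specific logging backend. The recurring rewrite pattern, taken verbatim from the hunks below:

    // Before: direct Log4j 2 API
    LOGGER.debug("SQL command to fetch catalogs: {}", SQL);

    // After: backend-agnostic facade
    LoggingUtil.log(LogLevel.DEBUG, String.format("SQL command to fetch catalogs: {%s}", SQL));
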
pom.xml

Lines changed: 23 additions & 12 deletions
@@ -26,13 +26,14 @@
     <maven.compiler.target>11</maven.compiler.target>
     <mockito.version>5.2.0</mockito.version>
     <jackson.version>2.15.1</jackson.version>
+    <log4j.version>2.22.1</log4j.version>
+    <slf4j.version>2.0.13</slf4j.version>
     <google.guava.version>33.0.0-jre</google.guava.version>
     <junit.jupiter.version>5.9.2</junit.jupiter.version>
     <google.findbugs.annotations.version>3.0.1</google.findbugs.annotations.version>
     <immutables.value.version>2.9.2</immutables.value.version>
     <httpclient.version>4.5.14</httpclient.version>
-    <log4japi.version>2.21.1</log4japi.version>
-    <log4jcore.version>2.21.1</log4jcore.version>
+
     <spotless.version>2.43.0</spotless.version>
     <commons-io.version>2.13.0</commons-io.version>
     <databricks-sdk.version>0.17.0</databricks-sdk.version>
@@ -86,6 +87,26 @@
       <artifactId>libthrift</artifactId>
       <version>${thrift.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>${slf4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <version>${log4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <version>${log4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <version>${log4j.version}</version>
+    </dependency>
     <dependency>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
@@ -119,16 +140,6 @@
       <version>${mockito.version}</version>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.logging.log4j</groupId>
-      <artifactId>log4j-api</artifactId>
-      <version>${log4japi.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.logging.log4j</groupId>
-      <artifactId>log4j-core</artifactId>
-      <version>${log4jcore.version}</version>
-    </dependency>
     <dependency>
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-databind</artifactId>

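With these dependencies, slf4j-api is the facade the driver programs against, log4j-slf4j-impl is the binding that routes SLF4J calls into Log4j 2, and log4j-core/log4j-api render the output. A minimal sketch of facade-style logging under that setup (class and message here are illustrative, not from this commit):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class FacadeExample {
      private static final Logger LOG = LoggerFactory.getLogger(FacadeExample.class);

      void run() {
        // With log4j-slf4j-impl and log4j-core on the classpath, this call is
        // handled by Log4j 2; swapping the binding swaps the backend.
        LOG.debug("Executing request [{}]", "sanitized-request");
      }
    }
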
src/main/java/com/databricks/jdbc/client/http/DatabricksHttpClient.java

Lines changed: 13 additions & 10 deletions
@@ -8,6 +8,8 @@
 import com.databricks.jdbc.client.DatabricksHttpException;
 import com.databricks.jdbc.client.DatabricksRetryHandlerException;
 import com.databricks.jdbc.client.IDatabricksHttpClient;
+import com.databricks.jdbc.commons.LogLevel;
+import com.databricks.jdbc.commons.util.LoggingUtil;
 import com.databricks.jdbc.driver.IDatabricksConnectionContext;
 import com.databricks.sdk.core.UserAgent;
 import com.google.common.annotations.VisibleForTesting;
@@ -35,14 +37,10 @@
 import org.apache.http.impl.conn.DefaultSchemePortResolver;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.http.protocol.HttpContext;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;

 /** Http client implementation to be used for executing http requests. */
 public class DatabricksHttpClient implements IDatabricksHttpClient {

-  private static final Logger LOGGER = LogManager.getLogger(DatabricksHttpClient.class);
-
   // Context attribute keys
   private static final String RETRY_INTERVAL_KEY = "retryInterval";
   private static final String TEMP_UNAVAILABLE_RETRY_COUNT_KEY = "tempUnavailableRetryCount";
@@ -338,7 +336,9 @@ public static synchronized DatabricksHttpClient getInstance(

   @Override
   public CloseableHttpResponse execute(HttpUriRequest request) throws DatabricksHttpException {
-    LOGGER.debug("Executing HTTP request [{}]", RequestSanitizer.sanitizeRequest(request));
+    LoggingUtil.log(
+        LogLevel.DEBUG,
+        String.format("Executing HTTP request [{%s}]", RequestSanitizer.sanitizeRequest(request)));
     try {
       return httpClient.execute(request);
     } catch (IOException e) {
@@ -351,9 +351,9 @@ public CloseableHttpResponse execute(HttpUriRequest request) throws DatabricksHt
       }
       String errorMsg =
           String.format(
-              "Caught error while executing http request: [%s]",
-              RequestSanitizer.sanitizeRequest(request));
-      LOGGER.error(errorMsg, e);
+              "Caught error while executing http request: [%s]. Error Message: [%s]",
+              RequestSanitizer.sanitizeRequest(request), e);
+      LoggingUtil.log(LogLevel.ERROR, errorMsg);
       throw new DatabricksHttpException(errorMsg, e);
     }
   }
@@ -362,7 +362,9 @@ public CloseableHttpResponse execute(HttpUriRequest request) throws DatabricksHt
   public void closeExpiredAndIdleConnections() {
     if (connectionManager != null) {
       synchronized (connectionManager) {
-        LOGGER.debug("connection pool stats: {}", connectionManager.getTotalStats());
+        LoggingUtil.log(
+            LogLevel.DEBUG,
+            String.format("connection pool stats: {%s}", connectionManager.getTotalStats()));
         connectionManager.closeExpiredConnections();
         connectionManager.closeIdleConnections(idleHttpConnectionExpiry, TimeUnit.SECONDS);
       }
@@ -398,7 +400,8 @@ public static synchronized void removeInstance(IDatabricksConnectionContext cont
     try {
       instance.httpClient.close();
     } catch (IOException e) {
-      LOGGER.error("Caught error while closing http client", e);
+      LoggingUtil.log(
+          LogLevel.DEBUG, String.format("Caught error while closing http client. Error %s", e));
     }
   }
 }

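Every call above goes through LoggingUtil.log(LogLevel, String), whose implementation is outside the visible hunks. A plausible minimal sketch, assuming it forwards pre-formatted messages to SLF4J (the class body below is a guess reconstructed from the call sites, not the commit's actual code):

    package com.databricks.jdbc.commons.util;

    import com.databricks.jdbc.commons.LogLevel;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public final class LoggingUtil {
      private static final Logger LOGGER = LoggerFactory.getLogger(LoggingUtil.class);

      // Forward a pre-formatted message to SLF4J at the requested level.
      public static void log(LogLevel level, String message) {
        switch (level) {
          case DEBUG:
            LOGGER.debug(message);
            break;
          case ERROR:
            LOGGER.error(message);
            break;
          default:
            LOGGER.info(message);
        }
      }
    }

Because callers format with String.format before calling in, the facade only needs a level-to-method dispatch; no varargs or marker API is required.
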
src/main/java/com/databricks/jdbc/client/impl/helper/CommandBuilder.java

Lines changed: 19 additions & 12 deletions
@@ -4,17 +4,16 @@
 import static com.databricks.jdbc.commons.util.ValidationUtil.throwErrorIfNull;
 import static com.databricks.jdbc.driver.DatabricksJdbcConstants.*;

+import com.databricks.jdbc.commons.LogLevel;
+import com.databricks.jdbc.commons.util.LoggingUtil;
 import com.databricks.jdbc.commons.util.WildcardUtil;
 import com.databricks.jdbc.core.DatabricksSQLFeatureNotSupportedException;
 import com.databricks.jdbc.core.IDatabricksSession;
 import java.sql.SQLException;
 import java.util.Collections;
 import java.util.HashMap;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;

 public class CommandBuilder {
-  private static final Logger LOGGER = LogManager.getLogger(CommandBuilder.class);
   private String catalogName = null;
   private String schemaName = null;
   private String tableName = null;
@@ -46,25 +45,33 @@ public CommandBuilder setTable(String tableName) {

   public CommandBuilder setSchemaPattern(String pattern) {
     this.schemaPattern = WildcardUtil.jdbcPatternToHive(pattern);
-    LOGGER.debug("Schema pattern conversion {} -> {}", pattern, schemaPattern);
+    LoggingUtil.log(
+        LogLevel.DEBUG,
+        String.format("Schema pattern conversion {%s} -> {%s}", pattern, schemaPattern));
     return this;
   }

   public CommandBuilder setTablePattern(String pattern) {
     this.tablePattern = WildcardUtil.jdbcPatternToHive(pattern);
-    LOGGER.debug("Table pattern conversion {} -> {}", pattern, tablePattern);
+    LoggingUtil.log(
+        LogLevel.DEBUG,
+        String.format("Table pattern conversion {%s} -> {%s}", pattern, tablePattern));
     return this;
   }

   public CommandBuilder setColumnPattern(String pattern) {
     this.columnPattern = WildcardUtil.jdbcPatternToHive(pattern);
-    LOGGER.debug("Column pattern conversion {} -> {}", pattern, columnPattern);
+    LoggingUtil.log(
+        LogLevel.DEBUG,
+        String.format("Column pattern conversion {%s} -> {%s}", pattern, columnPattern));
     return this;
   }

   public CommandBuilder setFunctionPattern(String pattern) {
     this.functionPattern = WildcardUtil.jdbcPatternToHive(pattern);
-    LOGGER.debug("Function pattern conversion {} -> {}", pattern, functionPattern);
+    LoggingUtil.log(
+        LogLevel.DEBUG,
+        String.format("Function pattern conversion {%s} -> {%s}", pattern, functionPattern));
     return this;
   }

@@ -77,7 +84,7 @@ private String fetchSchemaSQL() throws SQLException {
         String.format(
             "Building command for fetching schema. Catalog %s, SchemaPattern %s and session context %s",
             catalogName, schemaPattern, sessionContext);
-    LOGGER.debug(contextString);
+    LoggingUtil.log(LogLevel.DEBUG, contextString);
     throwErrorIfNull(Collections.singletonMap(CATALOG, catalogName), contextString);
     String showSchemaSQL = String.format(SHOW_SCHEMA_IN_CATALOG_SQL, catalogName);
     if (!WildcardUtil.isNullOrEmpty(schemaPattern)) {
@@ -91,7 +98,7 @@ private String fetchTablesSQL() throws SQLException {
         String.format(
             "Building command for fetching tables. Catalog %s, SchemaPattern %s, TablePattern %s and session context %s",
             catalogName, schemaPattern, tablePattern, sessionContext);
-    LOGGER.debug(contextString);
+    LoggingUtil.log(LogLevel.DEBUG, contextString);
     throwErrorIfNull(Collections.singletonMap(CATALOG, catalogName), contextString);
     String showTablesSQL = String.format(SHOW_TABLES_SQL, catalogName);
     if (!WildcardUtil.isNullOrEmpty(schemaPattern)) {
@@ -108,7 +115,7 @@ private String fetchColumnsSQL() throws SQLException {
         String.format(
             "Building command for fetching columns. Catalog %s, SchemaPattern %s, TablePattern %s, ColumnPattern %s and session context : %s",
             catalogName, schemaPattern, tablePattern, columnPattern, sessionContext);
-    LOGGER.debug(contextString);
+    LoggingUtil.log(LogLevel.DEBUG, contextString);
     throwErrorIfNull(Collections.singletonMap(CATALOG, catalogName), contextString);
     String showColumnsSQL = String.format(SHOW_COLUMNS_SQL, catalogName);

@@ -132,7 +139,7 @@ private String fetchFunctionsSQL() throws SQLException {
             "Building command for fetching functions. Catalog %s, SchemaPattern %s, FunctionPattern %s. With session context %s",
             catalogName, schemaPattern, functionPattern, sessionContext);

-    LOGGER.debug(contextString);
+    LoggingUtil.log(LogLevel.DEBUG, contextString);
     throwErrorIfNull(Collections.singletonMap(CATALOG, catalogName), contextString);
     String showFunctionsSQL = String.format(SHOW_FUNCTIONS_SQL, catalogName);
     if (!WildcardUtil.isNullOrEmpty(schemaPattern)) {
@@ -153,7 +160,7 @@ private String fetchPrimaryKeysSQL() throws SQLException {
         String.format(
             "Building command for fetching primary keys. Catalog %s, Schema %s, Table %s. With session context: %s",
             catalogName, schemaName, tableName, sessionContext);
-    LOGGER.debug(contextString);
+    LoggingUtil.log(LogLevel.DEBUG, contextString);
     HashMap<String, String> hashMap = new HashMap<>();
     hashMap.put(CATALOG, catalogName);
     hashMap.put(SCHEMA, schemaName);

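For context, this is how the builder is driven by DatabricksNewMetadataSdkClient later in this commit (call shape copied from those hunks):

    CommandBuilder commandBuilder =
        new CommandBuilder(catalog, session).setSchemaPattern(schemaNamePattern);
    String SQL = commandBuilder.getSQLString(CommandName.LIST_SCHEMAS);

Each setter normalizes its JDBC pattern via WildcardUtil.jdbcPatternToHive, logs the conversion at DEBUG, and returns this for chaining.
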
src/main/java/com/databricks/jdbc/client/impl/sdk/DatabricksMetadataSdkClient.java

Lines changed: 7 additions & 8 deletions
@@ -4,6 +4,8 @@

 import com.databricks.jdbc.client.DatabricksMetadataClient;
 import com.databricks.jdbc.client.StatementType;
+import com.databricks.jdbc.commons.LogLevel;
+import com.databricks.jdbc.commons.util.LoggingUtil;
 import com.databricks.jdbc.commons.util.WildcardUtil;
 import com.databricks.jdbc.core.DatabricksResultSet;
 import com.databricks.jdbc.core.IDatabricksSession;
@@ -18,14 +20,10 @@
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;

 /** Implementation for DatabricksMetadataClient using SDK client */
 public class DatabricksMetadataSdkClient implements DatabricksMetadataClient {

-  private static final Logger LOGGER = LogManager.getLogger(DatabricksSdkClient.class);
-
   private final DatabricksSdkClient sdkClient;

   public DatabricksMetadataSdkClient(DatabricksSdkClient sdkClient) {
@@ -40,7 +38,8 @@ public DatabricksResultSet listTypeInfo(IDatabricksSession session) {
   @Override
   public DatabricksResultSet listCatalogs(IDatabricksSession session) throws SQLException {
     String showCatalogsSQL = "show catalogs";
-    LOGGER.debug("SQL command to fetch catalogs: {}", showCatalogsSQL);
+    LoggingUtil.log(
+        LogLevel.DEBUG, String.format("SQL command to fetch catalogs: {%s}", showCatalogsSQL));

     ResultSet rs =
         sdkClient.executeStatement(
@@ -98,7 +97,7 @@ public DatabricksResultSet listSchemas(
     if (!WildcardUtil.isMatchAnything(schemaWithContext)) {
       showSchemaSQL += " like '" + schemaNamePattern + "'";
     }
-    LOGGER.debug("SQL command to fetch schemas: {}", showSchemaSQL);
+    LoggingUtil.log(LogLevel.DEBUG, "SQL command to fetch schemas: " + showSchemaSQL);
     try {
       ResultSet rs =
           sdkClient.executeStatement(
@@ -169,7 +168,7 @@ public DatabricksResultSet listTables(
     if (!WildcardUtil.isMatchAnything(tableWithContext)) {
       showTablesSQL += " like '" + tableWithContext + "'";
     }
-    LOGGER.debug("SQL command to fetch tables: {}", showTablesSQL);
+    LoggingUtil.log(LogLevel.DEBUG, "SQL command to fetch tables: " + showTablesSQL);
     try {
       ResultSet rs =
           sdkClient.executeStatement(
@@ -271,7 +270,7 @@ public DatabricksResultSet listColumns(
       String[] combination = catalogSchemaTableCombinations.poll();
       String showColumnsSQL =
           "show columns in " + combination[0] + "." + combination[1] + "." + combination[2];
-      LOGGER.debug("SQL command to fetch columns: {}", showColumnsSQL);
+      LoggingUtil.log(LogLevel.DEBUG, "SQL command to fetch columns: " + showColumnsSQL);
       try {
         ResultSet rs =
             sdkClient.executeStatement(

src/main/java/com/databricks/jdbc/client/impl/sdk/DatabricksNewMetadataSdkClient.java

Lines changed: 11 additions & 10 deletions
@@ -8,17 +8,19 @@
 import com.databricks.jdbc.client.impl.helper.CommandBuilder;
 import com.databricks.jdbc.client.impl.helper.CommandName;
 import com.databricks.jdbc.client.impl.helper.MetadataResultSetBuilder;
+import com.databricks.jdbc.commons.LogLevel;
 import com.databricks.jdbc.commons.MetricsList;
+import com.databricks.jdbc.commons.util.LoggingUtil;
+import com.databricks.jdbc.core.*;
 import com.databricks.jdbc.core.DatabricksResultSet;
 import com.databricks.jdbc.core.IDatabricksSession;
 import com.databricks.jdbc.core.ImmutableSqlParameter;
 import com.databricks.jdbc.driver.IDatabricksConnectionContext;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.*;
 import java.util.HashMap;
 import java.util.Optional;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;

 /**
  * This is for the new SQL commands added in runtime. Note that the DatabricksMetadataSdkClient will
@@ -27,8 +29,6 @@
  * Tracking bug for replacement: (PECO-1502)
  */
 public class DatabricksNewMetadataSdkClient implements DatabricksMetadataClient {
-
-  private static final Logger LOGGER = LogManager.getLogger(DatabricksNewMetadataSdkClient.class);
   private final DatabricksSdkClient sdkClient;

   public DatabricksNewMetadataSdkClient(DatabricksSdkClient sdkClient) {
@@ -37,7 +37,7 @@ public DatabricksNewMetadataSdkClient(DatabricksSdkClient sdkClient) {

   @Override
   public DatabricksResultSet listTypeInfo(IDatabricksSession session) {
-    LOGGER.debug("public ResultSet getTypeInfo()");
+    LoggingUtil.log(LogLevel.DEBUG, "public ResultSet getTypeInfo()");
     return TYPE_INFO_RESULT;
   }

@@ -46,7 +46,7 @@ public DatabricksResultSet listCatalogs(IDatabricksSession session) throws SQLEx
     long startTime = System.currentTimeMillis();
     CommandBuilder commandBuilder = new CommandBuilder(session);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_CATALOGS);
-    LOGGER.debug("SQL command to fetch catalogs: {}", SQL);
+    LoggingUtil.log(LogLevel.DEBUG, String.format("SQL command to fetch catalogs: {%s}", SQL));
     DatabricksResultSet resultSet =
         MetadataResultSetBuilder.getCatalogsResult(
             getResultSet(SQL, session, StatementType.METADATA));
@@ -65,7 +65,7 @@ public DatabricksResultSet listSchemas(
     CommandBuilder commandBuilder =
         new CommandBuilder(catalog, session).setSchemaPattern(schemaNamePattern);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_SCHEMAS);
-    LOGGER.debug("SQL command to fetch schemas: {}", SQL);
+    LoggingUtil.log(LogLevel.DEBUG, String.format("SQL command to fetch schemas: {%s}", SQL));
     DatabricksResultSet resultSet =
         MetadataResultSetBuilder.getSchemasResult(
             getResultSet(SQL, session, StatementType.METADATA), catalog);
@@ -108,7 +108,8 @@ public DatabricksResultSet listTables(

   @Override
   public DatabricksResultSet listTableTypes(IDatabricksSession session) throws SQLException {
-    LOGGER.debug("Returning list of table types.");
+
+    LoggingUtil.log(LogLevel.DEBUG, "Returning list of table types.");
     long startTime = System.currentTimeMillis();
     DatabricksResultSet resultSet = MetadataResultSetBuilder.getTableTypesResult();
     IDatabricksConnectionContext connectionContext = session.getConnectionContext();
@@ -158,7 +159,7 @@ public DatabricksResultSet listFunctions(
         .setSchemaPattern(schemaNamePattern)
         .setFunctionPattern(functionNamePattern);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_FUNCTIONS);
-    LOGGER.debug("SQL command to fetch functions: {}", SQL);
+    LoggingUtil.log(LogLevel.DEBUG, String.format("SQL command to fetch functions: {%s}", SQL));
     DatabricksResultSet resultSet =
         MetadataResultSetBuilder.getFunctionsResult(
             getResultSet(SQL, session, StatementType.QUERY), catalog);
@@ -177,7 +178,7 @@ public DatabricksResultSet listPrimaryKeys(
     CommandBuilder commandBuilder =
         new CommandBuilder(catalog, session).setSchema(schema).setTable(table);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_PRIMARY_KEYS);
-    LOGGER.debug("SQL command to fetch primary keys: {}", SQL);
+    LoggingUtil.log(LogLevel.DEBUG, String.format("SQL command to fetch primary keys: {%s}", SQL));
     DatabricksResultSet resultSet =
         MetadataResultSetBuilder.getPrimaryKeysResult(
             getResultSet(SQL, session, StatementType.METADATA));
