
Commit dff82dd

[PECOBLR-620] Fixed byteBufferToString function and added unit tests (#908)
## Description

Fixed the `byteBufferToString` function: it assumed the byte buffer held only a single `long` (8 bytes), which produced buggy output for 16-byte buffers. This resulted in buggy `sessionId` values and buggy logging statements. Also removed the extra `statementId` that was being logged.

## Testing

- Tested locally

Related tickets: [PECOBLR-620](https://databricks.atlassian.net/browse/PECOBLR-620)

[PECOBLR-620]: https://databricks.atlassian.net/browse/PECOBLR-620?atlOrigin=eyJpIjoiNWRkNTljNzYxNjVmNDY3MDlhMDU5Y2ZhYzA5YTRkZjUiLCJwIjoiZ2l0aHViLWNvbS1KU1cifQ

`NO_CHANGELOG=true`
1 parent 57e9fab commit dff82dd
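
For illustration only (this is not driver code; the class and variable names below are made up), a minimal sketch of the bug: a GUID is 16 bytes, so reading a single `long` and mirroring it into both halves of the `UUID` produces the wrong string, while reading two `long`s round-trips the value:

```java
import java.nio.ByteBuffer;
import java.util.UUID;

public class UuidBufferSketch {
  public static void main(String[] args) {
    UUID original = UUID.randomUUID();

    // A GUID is 16 bytes: most-significant long followed by least-significant long.
    ByteBuffer guid = ByteBuffer.allocate(16);
    guid.putLong(original.getMostSignificantBits());
    guid.putLong(original.getLeastSignificantBits());
    guid.flip();

    // Old behavior: read one long and use it for both halves -> wrong UUID string.
    ByteBuffer buggy = guid.duplicate();
    long sigBits = buggy.getLong();
    System.out.println("buggy   : " + new UUID(sigBits, sigBits));

    // Fixed behavior: read both longs when 16 bytes are available -> matches the original.
    ByteBuffer fixed = guid.duplicate();
    long mostSigBits = fixed.getLong();
    long leastSigBits = fixed.getLong();
    System.out.println("fixed   : " + new UUID(mostSigBits, leastSigBits));
    System.out.println("original: " + original);
  }
}
```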

File tree

3 files changed: +27 −5 lines changed


src/main/java/com/databricks/jdbc/common/DatabricksJdbcConstants.java

Lines changed: 1 addition & 0 deletions
```diff
@@ -67,6 +67,7 @@ public final class DatabricksJdbcConstants {
   public static final String VOLUME_OPERATION_STATUS_COLUMN_NAME = "operation_status";
   public static final String VOLUME_OPERATION_STATUS_SUCCEEDED = "SUCCEEDED";
   public static final int VOLUME_OPERATION_MAX_RETRIES = 3;
+  public static final int UUID_LENGTH = 16;
 
   public static final String ARROW_METADATA_KEY = "Spark:DataType:SqlName";
   public static final Map<String, String> ALLOWED_SESSION_CONF_TO_DEFAULT_VALUES_MAP =
```

src/main/java/com/databricks/jdbc/common/util/DatabricksThriftUtil.java

Lines changed: 11 additions & 5 deletions
```diff
@@ -6,6 +6,7 @@
 
 import com.databricks.jdbc.api.internal.IDatabricksSession;
 import com.databricks.jdbc.api.internal.IDatabricksStatementInternal;
+import com.databricks.jdbc.common.DatabricksJdbcConstants;
 import com.databricks.jdbc.dbclient.impl.common.StatementId;
 import com.databricks.jdbc.exception.DatabricksHttpException;
 import com.databricks.jdbc.exception.DatabricksSQLException;
```
```diff
@@ -56,8 +57,15 @@ public static TNamespace getNamespace(String catalog, String schema) {
 
   public static String byteBufferToString(ByteBuffer buffer) {
     ByteBuffer newBuffer = buffer.duplicate(); // This is to avoid a BufferUnderflowException
-    long sigBits = newBuffer.getLong();
-    return new UUID(sigBits, sigBits).toString();
+    // sessionId and statementID have guid which are 16 bytes long
+    if (newBuffer.remaining() >= DatabricksJdbcConstants.UUID_LENGTH) {
+      long mostSigBits = newBuffer.getLong();
+      long leastSigBits = newBuffer.getLong();
+      return new UUID(mostSigBits, leastSigBits).toString();
+    } else {
+      long sigBits = newBuffer.getLong();
+      return new UUID(sigBits, sigBits).toString();
+    }
   }
 
   public static ExternalLink createExternalLink(TSparkArrowResultLink chunkInfo, long chunkIndex) {
```
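
As a rough usage sketch (assuming the driver classes are on the classpath; the values here are illustrative, not taken from the commit), the new branch only changes the result for buffers of at least `UUID_LENGTH` bytes; shorter buffers keep the previous single-`long` behavior:

```java
import com.databricks.jdbc.common.util.DatabricksThriftUtil;
import java.nio.ByteBuffer;

public class ByteBufferToStringSketch {
  public static void main(String[] args) {
    // 16-byte GUID: both UUID halves are read, same as new UUID(1, 2).toString().
    ByteBuffer full = ByteBuffer.allocate(16);
    full.putLong(1L);
    full.putLong(2L);
    full.flip();
    System.out.println(DatabricksThriftUtil.byteBufferToString(full));

    // 8-byte buffer: falls back to the old behavior, same as new UUID(1, 1).toString().
    ByteBuffer half = ByteBuffer.allocate(8);
    half.putLong(1L);
    half.flip();
    System.out.println(DatabricksThriftUtil.byteBufferToString(half));
  }
}
```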
```diff
@@ -286,9 +294,7 @@ public static List<List<Object>> convertColumnarToRowBased(
   public static TOperationHandle getOperationHandle(StatementId statementId) {
     THandleIdentifier identifier = statementId.toOperationIdentifier();
     // This will help logging the statement-Id in readable format for debugging purposes
-    LOGGER.debug(
-        "getOperationHandle {%s} for statementId {%s}",
-        statementId, byteBufferToString(identifier.guid));
+    LOGGER.debug("getOperationHandle for statementId {%s}", byteBufferToString(identifier.guid));
     return new TOperationHandle()
         .setOperationId(identifier)
         .setOperationType(TOperationType.UNKNOWN);
```

src/test/java/com/databricks/jdbc/common/util/DatabricksThriftUtilTest.java

Lines changed: 15 additions & 0 deletions
```diff
@@ -8,6 +8,7 @@
 
 import com.databricks.jdbc.api.internal.IDatabricksSession;
 import com.databricks.jdbc.api.internal.IDatabricksStatementInternal;
+import com.databricks.jdbc.common.DatabricksJdbcConstants;
 import com.databricks.jdbc.exception.DatabricksHttpException;
 import com.databricks.jdbc.exception.DatabricksSQLException;
 import com.databricks.jdbc.model.client.thrift.generated.*;
```
```diff
@@ -43,6 +44,20 @@ void testByteBufferToString() {
     assertEquals(expectedUUID, result);
   }
 
+  @Test
+  void testByteBufferToStringWithUuidLengthBytes() {
+    DatabricksThriftUtil helper = new DatabricksThriftUtil();
+    long mostSigBits = 987654321L;
+    long leastSigBits = 123456789L;
+    ByteBuffer buffer = ByteBuffer.allocate(DatabricksJdbcConstants.UUID_LENGTH);
+    buffer.putLong(mostSigBits);
+    buffer.putLong(leastSigBits);
+    buffer.flip();
+    String result = helper.byteBufferToString(buffer);
+    String expectedUUID = new UUID(mostSigBits, leastSigBits).toString();
+    assertEquals(expectedUUID, result);
+  }
+
   @Test
   void testVerifySuccessStatus() {
     assertDoesNotThrow(
```

0 commit comments
