Skip to content

Commit f0f59f6

Browse files
Resolve NPE when bulk copy operation is performed on computed column (#2612)
Resolve NPE when a bulk copy operation is performed on a computed column. Includes an updated test and a formatter pass. Co-authored-by: Jeff Wasty <[email protected]>
1 parent 2691a7f commit f0f59f6

File tree

3 files changed

+121
-15
lines changed

3 files changed

+121
-15
lines changed

src/main/java/com/microsoft/sqlserver/jdbc/SQLServerBulkCopy.java

Lines changed: 12 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,6 @@
1818
import java.math.BigDecimal;
1919
import java.nio.ByteBuffer;
2020
import java.nio.ByteOrder;
21-
import java.security.MessageDigest;
22-
import java.security.NoSuchAlgorithmException;
2321
import java.sql.Connection;
2422
import java.sql.Date;
2523
import java.sql.ResultSet;
@@ -1764,16 +1762,16 @@ private void getDestinationMetadata() throws SQLServerException {
17641762
}
17651763

17661764
if (loggerExternal.isLoggable(Level.FINER)) {
1767-
loggerExternal.finer(this.toString() + " Acquiring existing destination column metadata " +
1768-
"from cache for bulk copy");
1765+
loggerExternal.finer(this.toString() + " Acquiring existing destination column metadata "
1766+
+ "from cache for bulk copy");
17691767
}
17701768

17711769
} else {
17721770
setDestinationColumnMetadata(escapedDestinationTableName);
17731771

17741772
if (loggerExternal.isLoggable(Level.FINER)) {
1775-
loggerExternal.finer(this.toString() + " cacheBulkCopyMetadata=false - Querying server " +
1776-
"for destination column metadata");
1773+
loggerExternal.finer(this.toString() + " cacheBulkCopyMetadata=false - Querying server "
1774+
+ "for destination column metadata");
17771775
}
17781776
}
17791777
}
@@ -1913,7 +1911,7 @@ private void validateColumnMappings() throws SQLServerException {
19131911

19141912
// Generate default column mappings
19151913
ColumnMapping cm;
1916-
for (int i = 1; i <= srcColumnCount; ++i) {
1914+
for (Integer i : destColumnMetadata.keySet()) {
19171915
// Only skip identity column mapping if KEEP IDENTITY OPTION is FALSE
19181916
if (!(destColumnMetadata.get(i).isIdentity && !copyOptions.isKeepIdentity())) {
19191917
cm = new ColumnMapping(i, i);
@@ -1967,7 +1965,7 @@ private void validateColumnMappings() throws SQLServerException {
19671965
if (-1 == cm.destinationColumnOrdinal) {
19681966
boolean foundColumn = false;
19691967

1970-
for (int j = 1; j <= destColumnCount; ++j) {
1968+
for (Integer j : destColumnMetadata.keySet()) {
19711969
if (destColumnMetadata.get(j).columnName.equals(cm.destinationColumnName)) {
19721970
foundColumn = true;
19731971
cm.destinationColumnOrdinal = j;
@@ -2128,7 +2126,8 @@ private void writeNullToTdsWriter(TDSWriter tdsWriter, int srcJdbcType,
21282126

21292127
private void writeColumnToTdsWriter(TDSWriter tdsWriter, int bulkPrecision, int bulkScale, int bulkJdbcType,
21302128
boolean bulkNullable, // should it be destNullable instead?
2131-
int srcColOrdinal, int destColOrdinal, boolean isStreaming, Object colValue, Calendar cal) throws SQLServerException {
2129+
int srcColOrdinal, int destColOrdinal, boolean isStreaming, Object colValue,
2130+
Calendar cal) throws SQLServerException {
21322131
SSType destSSType = destColumnMetadata.get(destColOrdinal).ssType;
21332132

21342133
bulkPrecision = validateSourcePrecision(bulkPrecision, bulkJdbcType,
@@ -3045,8 +3044,8 @@ private Object readColumnFromResultSet(int srcColOrdinal, int srcJdbcType, boole
30453044
/**
30463045
* Reads the given column from the result set current row and writes the data to tdsWriter.
30473046
*/
3048-
private void writeColumn(TDSWriter tdsWriter, int srcColOrdinal, int destColOrdinal,
3049-
Object colValue, Calendar cal) throws SQLServerException {
3047+
private void writeColumn(TDSWriter tdsWriter, int srcColOrdinal, int destColOrdinal, Object colValue,
3048+
Calendar cal) throws SQLServerException {
30503049
String destName = destColumnMetadata.get(destColOrdinal).columnName;
30513050
int srcPrecision, srcScale, destPrecision, srcJdbcType;
30523051
SSType destSSType = null;
@@ -3698,8 +3697,8 @@ private boolean writeBatchData(TDSWriter tdsWriter, TDSCommand command,
36983697
// Loop for each destination column. The mappings is a many to one mapping
36993698
// where multiple source columns can be mapped to one destination column.
37003699
for (ColumnMapping columnMapping : columnMappings) {
3701-
writeColumn(tdsWriter, columnMapping.sourceColumnOrdinal, columnMapping.destinationColumnOrdinal, null,
3702-
null // cell
3700+
writeColumn(tdsWriter, columnMapping.sourceColumnOrdinal, columnMapping.destinationColumnOrdinal,
3701+
null, null // cell
37033702
// value is
37043703
// retrieved
37053704
// inside

src/test/java/com/microsoft/sqlserver/jdbc/bulkCopy/BulkCopyCSVTest.java

Lines changed: 106 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,13 +6,13 @@
66

77
import static org.junit.Assert.fail;
88
import static org.junit.jupiter.api.Assertions.assertEquals;
9+
import static org.junit.jupiter.api.Assertions.assertTrue;
910

1011
import java.io.BufferedReader;
1112
import java.io.FileInputStream;
1213
import java.io.FileReader;
1314
import java.io.InputStream;
1415
import java.io.InputStreamReader;
15-
import java.lang.StackOverflowError;
1616
import java.net.URL;
1717
import java.sql.Connection;
1818
import java.sql.ResultSet;
@@ -37,6 +37,7 @@
3737
import com.microsoft.sqlserver.jdbc.RandomUtil;
3838
import com.microsoft.sqlserver.jdbc.SQLServerBulkCSVFileRecord;
3939
import com.microsoft.sqlserver.jdbc.SQLServerBulkCopy;
40+
import com.microsoft.sqlserver.jdbc.SQLServerBulkCopyOptions;
4041
import com.microsoft.sqlserver.jdbc.SQLServerException;
4142
import com.microsoft.sqlserver.jdbc.TestUtils;
4243
import com.microsoft.sqlserver.testframework.AbstractSQLGenerator;
@@ -68,6 +69,7 @@ public class BulkCopyCSVTest extends AbstractTest {
6869
static String inputFileDelimiterEscape = "BulkCopyCSVTestInputDelimiterEscape.csv";
6970
static String inputFileDelimiterEscapeNoNewLineAtEnd = "BulkCopyCSVTestInputDelimiterEscapeNoNewLineAtEnd.csv";
7071
static String inputFileMultipleDoubleQuotes = "BulkCopyCSVTestInputMultipleDoubleQuotes.csv";
72+
static String computeColumnCsvFile = "BulkCopyCSVTestInputComputeColumn.csv";
7173
static String encoding = "UTF-8";
7274
static String delimiter = ",";
7375

@@ -261,7 +263,7 @@ public void testEscapeColumnDelimitersCSVNoNewLineAtEnd() throws Exception {
261263
}
262264
}
263265
}
264-
266+
265267
/**
266268
* test simple csv file for bulkcopy, for GitHub issue 1391 Tests to ensure that the set returned by
267269
* getColumnOrdinals doesn't have to be ordered
@@ -446,6 +448,108 @@ public void testCSV2400() {
446448
}
447449
}
448450

451+
/**
452+
* Test to perform bulk copy with a computed column as the last column in the table.
453+
*/
454+
@Test
455+
@DisplayName("Test bulk copy with computed column as last column")
456+
public void testBulkCopyWithComputedColumnAsLastColumn() {
457+
String tableName = AbstractSQLGenerator.escapeIdentifier(RandomUtil.getIdentifier("BulkEscape"));
458+
String fileName = filePath + computeColumnCsvFile;
459+
460+
try (Connection con = getConnection(); Statement stmt = con.createStatement();
461+
SQLServerBulkCopy bulkCopy = new SQLServerBulkCopy(con);
462+
SQLServerBulkCSVFileRecord fileRecord = new SQLServerBulkCSVFileRecord(fileName, encoding, ",", true)) {
463+
464+
String createTableSQL = "CREATE TABLE " + tableName + " (" + "[NAME] varchar(50) NOT NULL,"
465+
+ "[AGE] int NULL," + "[CAL_COL] numeric(17, 2) NULL," + "[ORIGINAL] varchar(50) NOT NULL,"
466+
+ "[COMPUTED_COL] AS (right([NAME], 8)) PERSISTED" + ")";
467+
stmt.executeUpdate(createTableSQL);
468+
469+
fileRecord.addColumnMetadata(1, "NAME", java.sql.Types.VARCHAR, 50, 0);
470+
fileRecord.addColumnMetadata(2, "AGE", java.sql.Types.INTEGER, 0, 0);
471+
fileRecord.addColumnMetadata(3, "CAL_COL", java.sql.Types.NUMERIC, 17, 2);
472+
fileRecord.addColumnMetadata(4, "ORIGINAL", java.sql.Types.VARCHAR, 50, 0);
473+
474+
bulkCopy.setDestinationTableName(tableName);
475+
476+
bulkCopy.addColumnMapping("NAME", "NAME");
477+
bulkCopy.addColumnMapping("AGE", "AGE");
478+
bulkCopy.addColumnMapping("CAL_COL", "CAL_COL");
479+
bulkCopy.addColumnMapping("ORIGINAL", "ORIGINAL");
480+
481+
SQLServerBulkCopyOptions options = new SQLServerBulkCopyOptions();
482+
options.setKeepIdentity(false);
483+
options.setTableLock(true);
484+
bulkCopy.setBulkCopyOptions(options);
485+
486+
bulkCopy.writeToServer(fileRecord);
487+
488+
try (ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM " + tableName)) {
489+
if (rs.next()) {
490+
int rowCount = rs.getInt(1);
491+
assertTrue(rowCount > 0);
492+
}
493+
} finally {
494+
TestUtils.dropTableIfExists(tableName, stmt);
495+
}
496+
} catch (Exception e) {
497+
fail(e.getMessage());
498+
}
499+
}
500+
501+
/**
502+
* Test to perform bulk copy with a computed column not as the last column in the table.
503+
*/
504+
@Test
505+
@DisplayName("Test bulk copy with computed column not as last column")
506+
public void testBulkCopyWithComputedColumnNotAsLastColumn() {
507+
String tableName = AbstractSQLGenerator.escapeIdentifier(RandomUtil.getIdentifier("BulkEscape"));
508+
String fileName = filePath + computeColumnCsvFile;
509+
510+
try (Connection con = getConnection(); Statement stmt = con.createStatement();
511+
SQLServerBulkCopy bulkCopy = new SQLServerBulkCopy(con);
512+
SQLServerBulkCSVFileRecord fileRecord = new SQLServerBulkCSVFileRecord(fileName, encoding, ",", true)) {
513+
514+
String createTableSQL = "CREATE TABLE " + tableName + " (" + "[NAME] varchar(50) NOT NULL,"
515+
+ "[AGE] int NULL," + "[CAL_COL] numeric(17, 2) NULL," + "[ORIGINAL] varchar(50) NOT NULL,"
516+
+ "[COMPUTED_COL] AS (right([NAME], 8)) PERSISTED," + "[LAST_COL] varchar(50) NULL" + ")";
517+
stmt.executeUpdate(createTableSQL);
518+
519+
fileRecord.addColumnMetadata(1, "NAME", java.sql.Types.VARCHAR, 50, 0);
520+
fileRecord.addColumnMetadata(2, "AGE", java.sql.Types.INTEGER, 0, 0);
521+
fileRecord.addColumnMetadata(3, "CAL_COL", java.sql.Types.NUMERIC, 17, 2);
522+
fileRecord.addColumnMetadata(4, "ORIGINAL", java.sql.Types.VARCHAR, 50, 0);
523+
fileRecord.addColumnMetadata(5, "LAST_COL", java.sql.Types.VARCHAR, 50, 0);
524+
525+
bulkCopy.setDestinationTableName(tableName);
526+
527+
bulkCopy.addColumnMapping("NAME", "NAME");
528+
bulkCopy.addColumnMapping("AGE", "AGE");
529+
bulkCopy.addColumnMapping("CAL_COL", "CAL_COL");
530+
bulkCopy.addColumnMapping("ORIGINAL", "ORIGINAL");
531+
bulkCopy.addColumnMapping("LAST_COL", "LAST_COL");
532+
533+
SQLServerBulkCopyOptions options = new SQLServerBulkCopyOptions();
534+
options.setKeepIdentity(false);
535+
options.setTableLock(true);
536+
bulkCopy.setBulkCopyOptions(options);
537+
538+
bulkCopy.writeToServer(fileRecord);
539+
540+
try (ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM " + tableName)) {
541+
if (rs.next()) {
542+
int rowCount = rs.getInt(1);
543+
assertTrue(rowCount > 0);
544+
}
545+
} finally {
546+
TestUtils.dropTableIfExists(tableName, stmt);
547+
}
548+
} catch (Exception e) {
549+
fail(e.getMessage());
550+
}
551+
}
552+
449553
/**
450554
* drop source table after testing bulk copy
451555
*
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
NAME,AGE,CAL_COL,ORIGINAL,LAST_COL
2+
John Doe,30,12345.67,Original,Last
3+
Jane Smith,25,54321.00,Original,Last

0 commit comments

Comments
 (0)