Skip to content

Commit 1a3858a

Browse files
ketank-newsrowen
authored and committed
[SPARK-26985][CORE] Fix "access only some column of the all of columns " for big endian architecture BUG
continuation to apache#24788 ## What changes were proposed in this pull request? These changes relate to BIG ENDIAN systems. The changes are done to identify the s390x platform and to use ByteOrder.BIG_ENDIAN for big-endian systems; the byte-order changes are made in the access functions putFloats() and putDoubles(). ## How was this patch tested? Changes have been tested to build successfully on s390x as well as x86 platforms to make sure the build is successful. Closes apache#24861 from ketank-new/ketan_latest_v2.3.2. Authored-by: ketank-new <[email protected]> Signed-off-by: Sean Owen <[email protected]>
1 parent c83b3dd commit 1a3858a

File tree

3 files changed

+5
-5
lines changed

3 files changed

+5
-5
lines changed

common/unsafe/src/main/java/org/apache/spark/unsafe/Platform.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -304,7 +304,7 @@ public static void throwException(Throwable t) {
304304
static {
305305
boolean _unaligned;
306306
String arch = System.getProperty("os.arch", "");
307-
if (arch.equals("ppc64le") || arch.equals("ppc64")) {
307+
if (arch.equals("ppc64le") || arch.equals("ppc64") || arch.equals("s390x")) {
308308
// Since java.nio.Bits.unaligned() doesn't return true on ppc (See JDK-8165231), but
309309
// ppc64 and ppc64le support it
310310
_unaligned = true;

sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OffHeapColumnVector.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -417,7 +417,7 @@ public void putFloats(int rowId, int count, byte[] src, int srcIndex) {
417417
Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex,
418418
null, data + rowId * 4L, count * 4L);
419419
} else {
420-
ByteBuffer bb = ByteBuffer.wrap(src).order(ByteOrder.LITTLE_ENDIAN);
420+
ByteBuffer bb = ByteBuffer.wrap(src).order(ByteOrder.BIG_ENDIAN);
421421
long offset = data + 4L * rowId;
422422
for (int i = 0; i < count; ++i, offset += 4) {
423423
Platform.putFloat(null, offset, bb.getFloat(srcIndex + (4 * i)));
@@ -472,7 +472,7 @@ public void putDoubles(int rowId, int count, byte[] src, int srcIndex) {
472472
Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex,
473473
null, data + rowId * 8L, count * 8L);
474474
} else {
475-
ByteBuffer bb = ByteBuffer.wrap(src).order(ByteOrder.LITTLE_ENDIAN);
475+
ByteBuffer bb = ByteBuffer.wrap(src).order(ByteOrder.BIG_ENDIAN);
476476
long offset = data + 8L * rowId;
477477
for (int i = 0; i < count; ++i, offset += 8) {
478478
Platform.putDouble(null, offset, bb.getDouble(srcIndex + (8 * i)));

sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OnHeapColumnVector.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -396,7 +396,7 @@ public void putFloats(int rowId, int count, byte[] src, int srcIndex) {
396396
Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex, floatData,
397397
Platform.DOUBLE_ARRAY_OFFSET + rowId * 4L, count * 4L);
398398
} else {
399-
ByteBuffer bb = ByteBuffer.wrap(src).order(ByteOrder.LITTLE_ENDIAN);
399+
ByteBuffer bb = ByteBuffer.wrap(src).order(ByteOrder.BIG_ENDIAN);
400400
for (int i = 0; i < count; ++i) {
401401
floatData[i + rowId] = bb.getFloat(srcIndex + (4 * i));
402402
}
@@ -445,7 +445,7 @@ public void putDoubles(int rowId, int count, byte[] src, int srcIndex) {
445445
Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex, doubleData,
446446
Platform.DOUBLE_ARRAY_OFFSET + rowId * 8L, count * 8L);
447447
} else {
448-
ByteBuffer bb = ByteBuffer.wrap(src).order(ByteOrder.LITTLE_ENDIAN);
448+
ByteBuffer bb = ByteBuffer.wrap(src).order(ByteOrder.BIG_ENDIAN);
449449
for (int i = 0; i < count; ++i) {
450450
doubleData[i + rowId] = bb.getDouble(srcIndex + (8 * i));
451451
}

0 commit comments

Comments (0)