Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -499,8 +499,13 @@ public RexNode visitFunction(Function node, CalcitePlanContext context) {
List<RexNode> arguments = new ArrayList<>();

boolean isCoalesce = "coalesce".equalsIgnoreCase(node.getFuncName());
boolean wasInCoalesce = context.isInCoalesceFunction();
if (isCoalesce) {
context.setInCoalesceFunction(true);
} else {
// Disable the flag for nested non-COALESCE functions so that unresolved field
// names inside e.g. array_compact(...) are not silently replaced with null.
context.setInCoalesceFunction(false);
}

List<RexNode> capturedVars = null;
Expand Down Expand Up @@ -529,9 +534,8 @@ public RexNode visitFunction(Function node, CalcitePlanContext context) {
}
}
} finally {
if (isCoalesce) {
context.setInCoalesceFunction(false);
}
// Restore the previous COALESCE context state
context.setInCoalesceFunction(wasInCoalesce);
}

// For transform/mvmap functions with captured variables, add them as additional arguments
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -317,7 +317,7 @@ private static Optional<RexNode> replaceWithNullLiteralInCoalesce(CalcitePlanCon
if (context.isInCoalesceFunction()) {
return Optional.of(
context.rexBuilder.makeNullLiteral(
context.rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR)));
context.rexBuilder.getTypeFactory().createSqlType(SqlTypeName.NULL)));
}
return Optional.empty();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -93,11 +93,32 @@ public SqlReturnTypeInference getReturnTypeInference() {
return opBinding -> {
var operandTypes = opBinding.collectOperandTypes();

// Let Calcite determine the least restrictive common type
var commonType = opBinding.getTypeFactory().leastRestrictive(operandTypes);
return commonType != null
? commonType
: opBinding.getTypeFactory().createSqlType(SqlTypeName.VARCHAR);
// Filter out NULL-typed operands (from unresolved field names) so they don't
// influence the least-restrictive type. Without this, leastRestrictive([NULL, INT])
// could return NULL instead of INT in edge cases.
var nonNullTypes =
operandTypes.stream()
.filter(t -> t.getSqlTypeName() != SqlTypeName.NULL)
.collect(java.util.stream.Collectors.toList());

// If all operands are NULL-typed, fall back to VARCHAR
if (nonNullTypes.isEmpty()) {
return opBinding
.getTypeFactory()
.createTypeWithNullability(
opBinding.getTypeFactory().createSqlType(SqlTypeName.VARCHAR), true);
}

// Let Calcite determine the least restrictive common type from non-null operands
var commonType = opBinding.getTypeFactory().leastRestrictive(nonNullTypes);
if (commonType != null) {
// Result should be nullable since COALESCE may return null
return opBinding.getTypeFactory().createTypeWithNullability(commonType, true);
}
return opBinding
.getTypeFactory()
.createTypeWithNullability(
opBinding.getTypeFactory().createSqlType(SqlTypeName.VARCHAR), true);
};
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -126,4 +126,39 @@ public void testLeastRestrictiveDelegatesToSuperForPlainTypes() {
assertNotNull(result);
assertEquals(SqlTypeName.INTEGER, result.getSqlTypeName());
}

@Test
public void testLeastRestrictiveNullAndIntegerReturnsInteger() {
  // Regression test for https://github.com/opensearch-project/sql/issues/5175:
  // a leading NULL operand must not mask the concrete INTEGER type.
  RelDataType result =
      TYPE_FACTORY.leastRestrictive(
          List.of(
              TYPE_FACTORY.createSqlType(SqlTypeName.NULL),
              TYPE_FACTORY.createSqlType(SqlTypeName.INTEGER)));

  assertNotNull(result);
  assertEquals(SqlTypeName.INTEGER, result.getSqlTypeName());
}

@Test
public void testLeastRestrictiveIntegerAndNullReturnsInteger() {
  // Regression test for https://github.com/opensearch-project/sql/issues/5175:
  // operand order must not matter — a trailing NULL still yields INTEGER.
  RelDataType result =
      TYPE_FACTORY.leastRestrictive(
          List.of(
              TYPE_FACTORY.createSqlType(SqlTypeName.INTEGER),
              TYPE_FACTORY.createSqlType(SqlTypeName.NULL)));

  assertNotNull(result);
  assertEquals(SqlTypeName.INTEGER, result.getSqlTypeName());
}

@Test
public void testLeastRestrictiveNullAndDoubleReturnsDouble() {
  // A NULL operand combined with DOUBLE must resolve to DOUBLE,
  // mirroring the INTEGER cases above for a floating-point type.
  RelDataType result =
      TYPE_FACTORY.leastRestrictive(
          List.of(
              TYPE_FACTORY.createSqlType(SqlTypeName.NULL),
              TYPE_FACTORY.createSqlType(SqlTypeName.DOUBLE)));

  assertNotNull(result);
  assertEquals(SqlTypeName.DOUBLE, result.getSqlTypeName());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -242,6 +242,61 @@ public void testCoalesceTypeCoercionWithMixedTypes() throws IOException {
verifyDataRows(actual, rows(70, "70"), rows(30, "30"));
}

@Test
public void testCoalesceWithNullAndIntegerLiteral() throws IOException {
  // Regression test for https://github.com/opensearch-project/sql/issues/5175:
  // coalesce(null, 42) must surface as an integer column, not a string.
  String query =
      String.format(
          "source=%s | eval x = coalesce(null, 42) | fields x | head 1",
          TEST_INDEX_STATE_COUNTRY_WITH_NULL);
  JSONObject actual = executeQuery(query);

  verifySchema(actual, schema("x", "int"));
  verifyDataRows(actual, rows(42));
}

@Test
public void testCoalesceWithIntegerAndNullLiteral() throws IOException {
  // Regression test for https://github.com/opensearch-project/sql/issues/5175:
  // argument order is irrelevant — coalesce(42, null) is still an integer.
  String query =
      String.format(
          "source=%s | eval x = coalesce(42, null) | fields x | head 1",
          TEST_INDEX_STATE_COUNTRY_WITH_NULL);
  JSONObject actual = executeQuery(query);

  verifySchema(actual, schema("x", "int"));
  verifyDataRows(actual, rows(42));
}

@Test
public void testCoalesceWithNonExistentFieldAndIntegerLiteral() throws IOException {
  // Regression test for https://github.com/opensearch-project/sql/issues/5175:
  // an unresolved field name inside coalesce must not pull the result
  // type away from the integer fallback literal.
  String query =
      String.format(
          "source=%s | eval x = coalesce(nonexistent_field, 42) | fields x | head 1",
          TEST_INDEX_STATE_COUNTRY_WITH_NULL);
  JSONObject actual = executeQuery(query);

  verifySchema(actual, schema("x", "int"));
  verifyDataRows(actual, rows(42));
}

@Test
public void testCoalesceWithNullAndDoubleLiteral() throws IOException {
  // Regression test for https://github.com/opensearch-project/sql/issues/5175:
  // the double variant of the null-literal case must keep its numeric type.
  String query =
      String.format(
          "source=%s | eval x = coalesce(null, 3.14) | fields x | head 1",
          TEST_INDEX_STATE_COUNTRY_WITH_NULL);
  JSONObject actual = executeQuery(query);

  verifySchema(actual, schema("x", "double"));
  verifyDataRows(actual, rows(3.14));
}

@Test
public void testCoalesceWithCompatibleNumericAndTemporalTypes() throws IOException {
JSONObject actual =
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
# Issue: https://github.com/opensearch-project/sql/issues/5175
# COALESCE(null, 42) returns wrong type string instead of int due to
# leastRestrictive type inference when null is typed as VARCHAR.

setup:
- do:
query.settings:
body:
transient:
plugins.calcite.enabled: true
- do:
indices.create:
index: test_issue_5175
body:
settings:
number_of_shards: 1
number_of_replicas: 0
mappings:
properties:
dummy:
type: keyword
- do:
bulk:
index: test_issue_5175
refresh: true
body:
- '{"index": {}}'
- '{"dummy": "row"}'

---
teardown:
- do:
query.settings:
body:
transient:
plugins.calcite.enabled: false
- do:
indices.delete:
index: test_issue_5175

---
"COALESCE(null, 42) should return integer type":
- skip:
features:
- headers
- do:
headers:
Content-Type: 'application/json'
ppl:
body:
query: source=test_issue_5175 | eval x = coalesce(null, 42) | fields x | head 1
- match: { total: 1 }
- length: { datarows: 1 }
- match: { "schema": [{"name": "x", "type": "int"}] }
- match: { "datarows": [[42]] }

---
"COALESCE(42, null) should return integer type":
- skip:
features:
- headers
- do:
headers:
Content-Type: 'application/json'
ppl:
body:
query: source=test_issue_5175 | eval x = coalesce(42, null) | fields x | head 1
- match: { total: 1 }
- length: { datarows: 1 }
- match: { "schema": [{"name": "x", "type": "int"}] }
- match: { "datarows": [[42]] }

---
"COALESCE(nonexistent_field, 42) should return integer type":
- skip:
features:
- headers
- do:
headers:
Content-Type: 'application/json'
ppl:
body:
query: source=test_issue_5175 | eval x = coalesce(nonexistent_field, 42) | fields x | head 1
- match: { total: 1 }
- length: { datarows: 1 }
- match: { "schema": [{"name": "x", "type": "int"}] }
- match: { "datarows": [[42]] }

---
"COALESCE(null, 3.14) should return double type":
- skip:
features:
- headers
- do:
headers:
Content-Type: 'application/json'
ppl:
body:
query: source=test_issue_5175 | eval x = coalesce(null, 3.14) | fields x | head 1
- match: { total: 1 }
- length: { datarows: 1 }
- match: { "schema": [{"name": "x", "type": "double"}] }
- match: { "datarows": [[3.14]] }
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ public void testCoalesceWithNonExistentField() {
RelNode root = getRelNode(ppl);
String expectedLogical =
"LogicalSort(fetch=[2])\n"
+ " LogicalProject(EMPNO=[$0], result=[COALESCE(null:VARCHAR, $1)])\n"
+ " LogicalProject(EMPNO=[$0], result=[COALESCE(null:NULL, $1)])\n"
+ " LogicalTableScan(table=[[scott, EMP]])\n";
verifyLogical(root, expectedLogical);

Expand All @@ -155,7 +155,7 @@ public void testCoalesceWithMultipleNonExistentFields() {
RelNode root = getRelNode(ppl);
String expectedLogical =
"LogicalSort(fetch=[1])\n"
+ " LogicalProject(EMPNO=[$0], result=[COALESCE(null:VARCHAR, null:VARCHAR, $1,"
+ " LogicalProject(EMPNO=[$0], result=[COALESCE(null:NULL, null:NULL, $1,"
+ " 'fallback':VARCHAR)])\n"
+ " LogicalTableScan(table=[[scott, EMP]])\n";
verifyLogical(root, expectedLogical);
Expand All @@ -175,8 +175,8 @@ public void testCoalesceWithAllNonExistentFields() {
RelNode root = getRelNode(ppl);
String expectedLogical =
"LogicalSort(fetch=[1])\n"
+ " LogicalProject(EMPNO=[$0], result=[COALESCE(null:VARCHAR, null:VARCHAR,"
+ " null:VARCHAR)])\n"
+ " LogicalProject(EMPNO=[$0], result=[COALESCE(null:NULL, null:NULL,"
+ " null:NULL)])\n"
+ " LogicalTableScan(table=[[scott, EMP]])\n";
verifyLogical(root, expectedLogical);

Expand Down Expand Up @@ -217,6 +217,32 @@ public void testCoalesceWithSpaceString() {
verifyPPLToSparkSQL(root, expectedSparkSql);
}

@Test
public void testCoalesceWithNullAndIntegerLiteral() {
  // Regression test for https://github.com/opensearch-project/sql/issues/5175:
  // the null literal must be planned as null:NULL so COALESCE resolves to
  // INTEGER instead of VARCHAR.
  RelNode plan =
      getRelNode("source=EMP | eval x = coalesce(null, 42) | fields EMPNO, x | head 1");
  verifyLogical(
      plan,
      "LogicalSort(fetch=[1])\n"
          + " LogicalProject(EMPNO=[$0], x=[COALESCE(null:NULL, 42)])\n"
          + " LogicalTableScan(table=[[scott, EMP]])\n");
}

@Test
public void testCoalesceWithIntegerAndNullLiteral() {
  // Regression test for https://github.com/opensearch-project/sql/issues/5175:
  // same expectation as above with the operands swapped — COALESCE(42, null)
  // must still plan the null as null:NULL and keep the INTEGER result type.
  RelNode plan =
      getRelNode("source=EMP | eval x = coalesce(42, null) | fields EMPNO, x | head 1");
  verifyLogical(
      plan,
      "LogicalSort(fetch=[1])\n"
          + " LogicalProject(EMPNO=[$0], x=[COALESCE(42, null:NULL)])\n"
          + " LogicalTableScan(table=[[scott, EMP]])\n");
}

@Test
public void testCoalesceTypeInferenceWithNonNullableOperands() {
String ppl =
Expand Down
Loading