Commit d567ec8

Scale of decimal literal should always be positive in Calcite (#4401)
* Fix IT

Signed-off-by: Lantao Jin <[email protected]>

* remove unused class

Signed-off-by: Lantao Jin <[email protected]>

---------

Signed-off-by: Lantao Jin <[email protected]>
1 parent b9e2761 commit d567ec8

6 files changed: 85 additions, 11 deletions

core/src/main/java/org/opensearch/sql/ast/expression/Literal.java

Lines changed: 16 additions & 1 deletion
@@ -7,6 +7,7 @@
 
 import com.google.common.collect.ImmutableList;
 import java.math.BigDecimal;
+import java.text.DecimalFormat;
 import java.util.List;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
@@ -25,7 +26,21 @@ public class Literal extends UnresolvedExpression {
 
   public Literal(Object value, DataType dataType) {
     if (dataType == DataType.DECIMAL && value instanceof Double) {
-      this.value = BigDecimal.valueOf((Double) value);
+      // This branch is only used in testing for backward compatibility:
+      // We accept decimal literal by Literal(double, DataType.DECIMAL).
+      // Some double values such as 0.0001 will be converted to string "1.0E-4" and finally
+      // generate decimal 0.00010. So here we parse a decimal text to Double then convert it
+      // to BigDecimal as well.
+      // In v2, a decimal literal will be converted back to double in resolving expression
+      // via ExprDoubleValue.
+      // In v3, a decimal literal will be kept in Calcite RexNode and converted back to double
+      // in runtime.
+      DecimalFormat df = new DecimalFormat();
+      df.setMaximumFractionDigits(38);
+      df.setMinimumFractionDigits(1);
+      df.setGroupingUsed(false);
+      String plain = df.format(value);
+      this.value = new BigDecimal(plain);
     } else {
       this.value = value;
     }
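Why DecimalFormat here instead of BigDecimal.valueOf: valueOf goes through Double.toString, which switches to scientific notation, so a small double like 0.0001 becomes 0.00010 and a large double ends up with a negative scale, while the point of this change is that the scale of a decimal literal handed to Calcite must stay positive. The standalone sketch below is not part of the commit (class name is illustrative, and the printed scales assume a locale that uses '.' as the decimal separator); it contrasts the two conversions for a large double.

import java.math.BigDecimal;
import java.text.DecimalFormat;

// Standalone sketch: BigDecimal.valueOf vs. the DecimalFormat-based conversion
// used in the new Literal constructor.
public class DecimalScaleDemo {
  public static void main(String[] args) {
    double d = 9.223372036854776E18;

    // BigDecimal.valueOf uses Double.toString, which yields "9.223372036854776E18"
    // and therefore a negative scale.
    BigDecimal viaValueOf = BigDecimal.valueOf(d);
    System.out.println(viaValueOf.scale()); // -3

    // Formatting to plain text with at least one fraction digit keeps the scale
    // positive, which is what Calcite expects for decimal literals.
    DecimalFormat df = new DecimalFormat();
    df.setMaximumFractionDigits(38);
    df.setMinimumFractionDigits(1);
    df.setGroupingUsed(false);
    BigDecimal viaFormat = new BigDecimal(df.format(d));
    System.out.println(viaFormat.scale()); // 1
  }
}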

core/src/main/java/org/opensearch/sql/executor/OpenSearchTypeSystem.java

Lines changed: 14 additions & 0 deletions
@@ -17,9 +17,23 @@
 
 public class OpenSearchTypeSystem extends RelDataTypeSystemImpl {
   public static final RelDataTypeSystem INSTANCE = new OpenSearchTypeSystem();
+  // same with Spark DecimalType.MAX_PRECISION
+  public static int MAX_PRECISION = 38;
+  // same with Spark DecimalType.MAX_SCALE
+  public static int MAX_SCALE = 38;
 
   private OpenSearchTypeSystem() {}
 
+  @Override
+  public int getMaxNumericPrecision() {
+    return MAX_PRECISION;
+  }
+
+  @Override
+  public int getMaxNumericScale() {
+    return MAX_SCALE;
+  }
+
   @Override
   public RelDataType deriveAvgAggType(RelDataTypeFactory typeFactory, RelDataType argumentType) {
     if (SqlTypeName.DECIMAL == argumentType.getSqlTypeName()) {
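These overrides raise the maximum numeric precision and scale from the stock RelDataTypeSystemImpl limit of 19 to 38 (matching Spark's DecimalType), so a literal such as 9223372036854775807.0000001 can keep its 26 significant digits instead of having its precision capped. The sketch below is illustrative only, not from the commit; it assumes Calcite's SqlTypeFactoryImpl, which caps a requested precision at the type system's maximum.

import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;
import org.opensearch.sql.executor.OpenSearchTypeSystem;

// Illustrative only: compares the default Calcite type system with the
// OpenSearch one when asking for a 26-digit decimal type.
public class MaxPrecisionDemo {
  public static void main(String[] args) {
    RelDataTypeFactory stock = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    RelDataTypeFactory custom = new SqlTypeFactoryImpl(OpenSearchTypeSystem.INSTANCE);

    // The factory narrows the requested precision to the type system's maximum,
    // so the default system silently loses integer digits.
    System.out.println(stock.createSqlType(SqlTypeName.DECIMAL, 26, 7));  // DECIMAL(19, 7)
    System.out.println(custom.createSqlType(SqlTypeName.DECIMAL, 26, 7)); // DECIMAL(26, 7)
  }
}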
Lines changed: 37 additions & 0 deletions
@@ -0,0 +1,37 @@
+setup:
+  - do:
+      query.settings:
+        body:
+          transient:
+            plugins.calcite.enabled : true
+  - do:
+      bulk:
+        index: test
+        refresh: true
+        body:
+          - '{"index": {}}'
+          - '{"id": 1}'
+
+---
+teardown:
+  - do:
+      query.settings:
+        body:
+          transient:
+            plugins.calcite.enabled : false
+
+---
+"big decimal literal":
+  - skip:
+      features:
+        - headers
+        - allowed_warnings
+  - do:
+      headers:
+        Content-Type: 'application/json'
+      ppl:
+        body:
+          query: source=test | eval floor = FLOOR(9223372036854775807.0000001) | fields floor
+
+  - match: { total: 1 }
+  - match: {"datarows": [[9.223372036854776E18]]}

opensearch/src/main/java/org/opensearch/sql/opensearch/util/JdbcOpenSearchDataTypeConvertor.java

Lines changed: 1 addition & 0 deletions
@@ -98,6 +98,7 @@ public static ExprValue getExprValueFromSqlType(
         return ExprValueUtils.fromObjectValue(rs.getFloat(i));
 
       case Types.DECIMAL:
+        return ExprValueUtils.fromObjectValue(rs.getBigDecimal(i));
       case Types.NUMERIC:
       case Types.DOUBLE:
         return ExprValueUtils.fromObjectValue(rs.getDouble(i));

ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java

Lines changed: 1 addition & 10 deletions
@@ -575,16 +575,7 @@ public UnresolvedExpression visitIntegerLiteral(IntegerLiteralContext ctx) {
 
   @Override
   public UnresolvedExpression visitDecimalLiteral(DecimalLiteralContext ctx) {
-    // For backward compatibility, we accept decimal literal by `Literal(double, DataType.DECIMAL)`
-    // The double value will be converted to decimal by BigDecimal.valueOf((Double) value),
-    // some double values such as 0.0001 will be converted to string "1.0E-4" and finally
-    // generate decimal 0.00010. So here we parse a decimal text to Double then convert it
-    // to BigDecimal as well.
-    // In v2, a decimal literal will be converted back to double in resolving expression
-    // via ExprDoubleValue.
-    // In v3, a decimal literal will be kept in Calcite RexNode and converted back to double
-    // in runtime.
-    return new Literal(BigDecimal.valueOf(Double.parseDouble(ctx.getText())), DataType.DECIMAL);
+    return new Literal(new BigDecimal(ctx.getText()), DataType.DECIMAL);
   }
 
   @Override
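With the legacy double handling moved into the Literal constructor, the parser can now build the BigDecimal straight from the literal text, preserving every digit of the source and never producing a negative scale. The comparison below is illustrative (class name is made up, not part of the commit) and contrasts the removed parse path with the new one.

import java.math.BigDecimal;

// Illustrative comparison of the removed and the new parse path for the
// decimal literal used by the tests in this commit.
public class DecimalParseDemo {
  public static void main(String[] args) {
    String text = "9223372036854775807.0000001";

    // Old path: the text is first narrowed to a double, losing the fraction
    // and ending up with a negative scale.
    BigDecimal viaDouble = BigDecimal.valueOf(Double.parseDouble(text));
    System.out.println(viaDouble + " scale=" + viaDouble.scale()); // 9.223372036854776E+18 scale=-3

    // New path: the text is parsed directly, keeping all 26 significant digits.
    BigDecimal direct = new BigDecimal(text);
    System.out.println(direct + " scale=" + direct.scale()); // 9223372036854775807.0000001 scale=7
  }
}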

ppl/src/test/java/org/opensearch/sql/ppl/calcite/CalcitePPLMathFunctionTest.java

Lines changed: 16 additions & 0 deletions
@@ -170,6 +170,22 @@ public void testFloor() {
     verifyPPLToSparkSQL(root, expectedSparkSql);
   }
 
+  @Test
+  public void testFloorBigDecimal() {
+    RelNode root =
+        getRelNode(
+            "source=EMP | head 1 | eval FLOOR1 = floor(9223372036854775807.0000001) | fields"
+                + " FLOOR1");
+    String expectedLogical =
+        "LogicalProject(FLOOR1=[FLOOR(9223372036854775807.0000001:DECIMAL(26, 7))])\n"
+            + "  LogicalSort(fetch=[1])\n"
+            + "    LogicalTableScan(table=[[scott, EMP]])\n";
+    verifyLogical(root, expectedLogical);
+    String expectedSparkSql =
+        "SELECT FLOOR(9223372036854775807.0000001) `FLOOR1`\nFROM `scott`.`EMP`\nLIMIT 1";
+    verifyPPLToSparkSQL(root, expectedSparkSql);
+  }
+
   @Test
   public void testLn() {
     RelNode root = getRelNode("source=EMP | eval LN = ln(2) | fields LN");
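The DECIMAL(26, 7) in the expected logical plan follows from the BigDecimal built in visitDecimalLiteral: the literal's unscaled value has 26 significant digits and its scale is 7, which is how the literal's relational type comes out when Calcite creates the RexLiteral. A small standalone check, not part of the commit:

import java.math.BigDecimal;

// Standalone check of where DECIMAL(26, 7) in the expected plan comes from.
public class LiteralTypeDemo {
  public static void main(String[] args) {
    BigDecimal literal = new BigDecimal("9223372036854775807.0000001");
    System.out.println(literal.precision()); // 26
    System.out.println(literal.scale());     // 7
  }
}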
