
Commit b9cae37

yaooqinn authored and cloud-fan committed
[SPARK-29774][SQL] Date and Timestamp type +/- null should be null as Postgres
### What changes were proposed in this pull request?

Add an analyzer rule to convert unresolved `Add`, `Subtract`, etc. to `TimeAdd`, `DateAdd`, etc. according to the following policy:

```scala
/**
 * For [[Add]]:
 * 1. if both sides are intervals, stays the same;
 * 2. else if one side is an interval, turns it to [[TimeAdd]];
 * 3. else if one side is a date, turns it to [[DateAdd]];
 * 4. else stays the same.
 *
 * For [[Subtract]]:
 * 1. if both sides are intervals, stays the same;
 * 2. else if the right side is an interval, turns it to [[TimeSub]];
 * 3. else if one side is a timestamp, turns it to [[SubtractTimestamps]];
 * 4. else if the right side is a date, turns it to [[DateDiff]]/[[SubtractDates]];
 * 5. else if the left side is a date, turns it to [[DateSub]];
 * 6. else stays the same.
 *
 * For [[Multiply]]:
 * 1. if one side is an interval, turns it to [[MultiplyInterval]];
 * 2. otherwise, stays the same.
 *
 * For [[Divide]]:
 * 1. if the left side is an interval, turns it to [[DivideInterval]];
 * 2. otherwise, stays the same.
 */
```

In addition, the datetime functions are changed from implicitly cast input types to strict input types; all remaining type coercions happen in the `DateTimeOperations` coercion rule.

### Why are the changes needed?

Feature parity between PostgreSQL and Spark, and to make the null semantics of datetime arithmetic consistent with the rest of Spark.

### Does this PR introduce any user-facing change?

1. The date_add/date_sub functions now only accept int/tinyint/smallint as the second argument; double/string etc. are forbidden, as in Hive, since they produced weird results.

### How was this patch tested?

Added unit tests.

Closes apache#26412 from yaooqinn/SPARK-29774.

Authored-by: Kent Yao <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
1 parent 332e252 commit b9cae37
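
As a hedged spark-shell sketch of the new behavior (the `spark` session is the one spark-shell provides; the queries are lifted from the datetime.sql tests added below, and the noted results follow the PR description rather than a verified run):

```scala
// Date/timestamp +/- NULL now evaluates to NULL, matching PostgreSQL.
spark.sql("select date '2001-09-28' + null").show()
spark.sql("select null - timestamp '2019-10-06 10:11:12.345678'").show()

// date_add/date_sub now accept only tinyint/smallint/int as the second argument.
spark.sql("select date_add('2011-11-11', 1Y)").show()   // tinyint literal: accepted
// spark.sql("select date_add('2011-11-11', '1')")      // string: now rejected at analysis time
```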

File tree

10 files changed, +457 -204 lines changed


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 60 additions & 0 deletions
@@ -228,6 +228,7 @@ class Analyzer(
       ResolveLambdaVariables(conf) ::
       ResolveTimeZone(conf) ::
       ResolveRandomSeed ::
+      ResolveBinaryArithmetic(conf) ::
       TypeCoercion.typeCoercionRules(conf) ++
       extendedResolutionRules : _*),
     Batch("PostgreSQL Dialect", Once, PostgreSQLDialect.postgreSQLDialectRules: _*),
@@ -246,6 +247,65 @@ class Analyzer(
     CleanupAliases)
   )
 
+  /**
+   * For [[Add]]:
+   * 1. if both sides are intervals, stays the same;
+   * 2. else if one side is an interval, turns it to [[TimeAdd]];
+   * 3. else if one side is a date, turns it to [[DateAdd]];
+   * 4. else stays the same.
+   *
+   * For [[Subtract]]:
+   * 1. if both sides are intervals, stays the same;
+   * 2. else if the right side is an interval, turns it to [[TimeSub]];
+   * 3. else if one side is a timestamp, turns it to [[SubtractTimestamps]];
+   * 4. else if the right side is a date, turns it to [[DateDiff]]/[[SubtractDates]];
+   * 5. else if the left side is a date, turns it to [[DateSub]];
+   * 6. else stays the same.
+   *
+   * For [[Multiply]]:
+   * 1. if one side is an interval, turns it to [[MultiplyInterval]];
+   * 2. otherwise, stays the same.
+   *
+   * For [[Divide]]:
+   * 1. if the left side is an interval, turns it to [[DivideInterval]];
+   * 2. otherwise, stays the same.
+   */
+  case class ResolveBinaryArithmetic(conf: SQLConf) extends Rule[LogicalPlan] {
+    override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
+      case p: LogicalPlan => p.transformExpressionsUp {
+        case a @ Add(l, r) if a.childrenResolved => (l.dataType, r.dataType) match {
+          case (CalendarIntervalType, CalendarIntervalType) => a
+          case (_, CalendarIntervalType) => Cast(TimeAdd(l, r), l.dataType)
+          case (CalendarIntervalType, _) => Cast(TimeAdd(r, l), r.dataType)
+          case (DateType, _) => DateAdd(l, r)
+          case (_, DateType) => DateAdd(r, l)
+          case _ => a
+        }
+        case s @ Subtract(l, r) if s.childrenResolved => (l.dataType, r.dataType) match {
+          case (CalendarIntervalType, CalendarIntervalType) => s
+          case (_, CalendarIntervalType) => Cast(TimeSub(l, r), l.dataType)
+          case (TimestampType, _) => SubtractTimestamps(l, r)
+          case (_, TimestampType) => SubtractTimestamps(l, r)
+          case (_, DateType) => if (conf.usePostgreSQLDialect) {
+            DateDiff(l, r)
+          } else {
+            SubtractDates(l, r)
+          }
+          case (DateType, _) => DateSub(l, r)
+          case _ => s
+        }
+        case m @ Multiply(l, r) if m.childrenResolved => (l.dataType, r.dataType) match {
+          case (CalendarIntervalType, _) => MultiplyInterval(l, r)
+          case (_, CalendarIntervalType) => MultiplyInterval(r, l)
+          case _ => m
+        }
+        case d @ Divide(l, r) if d.childrenResolved => (l.dataType, r.dataType) match {
+          case (CalendarIntervalType, _) => DivideInterval(l, r)
+          case _ => d
+        }
+      }
+    }
+  }
+
   /**
    * Substitute child plan with WindowSpecDefinitions.
    */
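
To see what the new rule produces, a hedged spark-shell sketch (default `spark` session assumed) that inspects analyzed plans; the expected rewrites follow directly from the pattern matches added above:

```scala
// Add(date, interval) becomes Cast(TimeAdd(date, interval), DateType), keeping the date type.
spark.sql("select date '2011-11-11' + interval '2' second").queryExecution.analyzed

// Subtract(timestamp, date) becomes SubtractTimestamps; the date operand is cast to
// timestamp later by the DateTimeOperations coercion rule.
spark.sql("select timestamp '2019-10-06 10:11:12.345678' - date '2020-01-01'").queryExecution.analyzed

// Add(date, integral) becomes DateAdd, which now also accepts tinyint/smallint.
spark.sql("select date '2001-09-28' + 7Y").queryExecution.analyzed
```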

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala

Lines changed: 15 additions & 46 deletions
@@ -821,52 +821,24 @@ object TypeCoercion {
     }
   }
 
-  /**
-   * 1. Turns Add/Subtract of DateType/TimestampType/StringType and CalendarIntervalType
-   *    to TimeAdd/TimeSub.
-   * 2. Turns Add/Subtract of TimestampType/DateType/IntegerType
-   *    and TimestampType/IntegerType/DateType to DateAdd/DateSub/SubtractDates and
-   *    to SubtractTimestamps.
-   * 3. Turns Multiply/Divide of CalendarIntervalType and NumericType
-   *    to MultiplyInterval/DivideInterval
-   */
   object DateTimeOperations extends Rule[LogicalPlan] {
-
-    private val acceptedTypes = Seq(DateType, TimestampType, StringType)
-
-    def apply(plan: LogicalPlan): LogicalPlan = plan resolveExpressions {
+    override def apply(plan: LogicalPlan): LogicalPlan = plan resolveExpressions {
       // Skip nodes who's children have not been resolved yet.
       case e if !e.childrenResolved => e
-
-      case Add(l @ CalendarIntervalType(), r) if acceptedTypes.contains(r.dataType) =>
-        Cast(TimeAdd(r, l), r.dataType)
-      case Add(l, r @ CalendarIntervalType()) if acceptedTypes.contains(l.dataType) =>
-        Cast(TimeAdd(l, r), l.dataType)
-      case Subtract(l, r @ CalendarIntervalType()) if acceptedTypes.contains(l.dataType) =>
-        Cast(TimeSub(l, r), l.dataType)
-      case Multiply(l @ CalendarIntervalType(), r @ NumericType()) =>
-        MultiplyInterval(l, r)
-      case Multiply(l @ NumericType(), r @ CalendarIntervalType()) =>
-        MultiplyInterval(r, l)
-      case Divide(l @ CalendarIntervalType(), r @ NumericType()) =>
-        DivideInterval(l, r)
-
-      case b @ BinaryOperator(l @ CalendarIntervalType(), r @ NullType()) =>
-        b.withNewChildren(Seq(l, Cast(r, CalendarIntervalType)))
-      case b @ BinaryOperator(l @ NullType(), r @ CalendarIntervalType()) =>
-        b.withNewChildren(Seq(Cast(l, CalendarIntervalType), r))
-
-      case Add(l @ DateType(), r @ IntegerType()) => DateAdd(l, r)
-      case Add(l @ IntegerType(), r @ DateType()) => DateAdd(r, l)
-      case Subtract(l @ DateType(), r @ IntegerType()) => DateSub(l, r)
-      case Subtract(l @ DateType(), r @ DateType()) =>
-        if (SQLConf.get.usePostgreSQLDialect) DateDiff(l, r) else SubtractDates(l, r)
-      case Subtract(l @ TimestampType(), r @ TimestampType()) =>
-        SubtractTimestamps(l, r)
-      case Subtract(l @ TimestampType(), r @ DateType()) =>
-        SubtractTimestamps(l, Cast(r, TimestampType))
-      case Subtract(l @ DateType(), r @ TimestampType()) =>
-        SubtractTimestamps(Cast(l, TimestampType), r)
+      case d @ DateAdd(TimestampType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
+      case d @ DateAdd(StringType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
+      case d @ DateSub(TimestampType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
+      case d @ DateSub(StringType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
+
+      case s @ SubtractTimestamps(DateType(), _) =>
+        s.copy(endTimestamp = Cast(s.endTimestamp, TimestampType))
+      case s @ SubtractTimestamps(_, DateType()) =>
+        s.copy(startTimestamp = Cast(s.startTimestamp, TimestampType))
+
+      case t @ TimeAdd(DateType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
+      case t @ TimeAdd(StringType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
+      case t @ TimeSub(DateType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
+      case t @ TimeSub(StringType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
     }
   }
 
@@ -880,11 +852,8 @@ object TypeCoercion {
       case e if !e.childrenResolved => e
 
       // If DecimalType operands are involved, DecimalPrecision will handle it
-      // If CalendarIntervalType operands are involved, DateTimeOperations will handle it
       case b @ BinaryOperator(left, right) if !left.dataType.isInstanceOf[DecimalType] &&
           !right.dataType.isInstanceOf[DecimalType] &&
-          !left.dataType.isInstanceOf[CalendarIntervalType] &&
-          !right.dataType.isInstanceOf[CalendarIntervalType] &&
          left.dataType != right.dataType =>
         findTightestCommonType(left.dataType, right.dataType).map { commonType =>
           if (b.inputType.acceptsType(commonType)) {

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala

Lines changed: 11 additions & 9 deletions
@@ -151,17 +151,18 @@ case class CurrentBatchTimestamp(
   """,
   since = "1.5.0")
 case class DateAdd(startDate: Expression, days: Expression)
-  extends BinaryExpression with ImplicitCastInputTypes {
+  extends BinaryExpression with ExpectsInputTypes {
 
   override def left: Expression = startDate
   override def right: Expression = days
 
-  override def inputTypes: Seq[AbstractDataType] = Seq(DateType, IntegerType)
+  override def inputTypes: Seq[AbstractDataType] =
+    Seq(DateType, TypeCollection(IntegerType, ShortType, ByteType))
 
   override def dataType: DataType = DateType
 
   override def nullSafeEval(start: Any, d: Any): Any = {
-    start.asInstanceOf[Int] + d.asInstanceOf[Int]
+    start.asInstanceOf[Int] + d.asInstanceOf[Number].intValue()
   }
 
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
@@ -185,16 +186,17 @@ case class DateAdd(startDate: Expression, days: Expression)
   """,
   since = "1.5.0")
 case class DateSub(startDate: Expression, days: Expression)
-  extends BinaryExpression with ImplicitCastInputTypes {
+  extends BinaryExpression with ExpectsInputTypes {
   override def left: Expression = startDate
   override def right: Expression = days
 
-  override def inputTypes: Seq[AbstractDataType] = Seq(DateType, IntegerType)
+  override def inputTypes: Seq[AbstractDataType] =
+    Seq(DateType, TypeCollection(IntegerType, ShortType, ByteType))
 
   override def dataType: DataType = DateType
 
   override def nullSafeEval(start: Any, d: Any): Any = {
-    start.asInstanceOf[Int] - d.asInstanceOf[Int]
+    start.asInstanceOf[Int] - d.asInstanceOf[Number].intValue()
   }
 
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
@@ -1072,7 +1074,7 @@ case class NextDay(startDate: Expression, dayOfWeek: Expression)
  * Adds an interval to timestamp.
  */
 case class TimeAdd(start: Expression, interval: Expression, timeZoneId: Option[String] = None)
-  extends BinaryExpression with TimeZoneAwareExpression with ImplicitCastInputTypes {
+  extends BinaryExpression with TimeZoneAwareExpression with ExpectsInputTypes {
 
   def this(start: Expression, interval: Expression) = this(start, interval, None)
 
@@ -1187,7 +1189,7 @@ case class FromUTCTimestamp(left: Expression, right: Expression)
  * Subtracts an interval from timestamp.
  */
 case class TimeSub(start: Expression, interval: Expression, timeZoneId: Option[String] = None)
-  extends BinaryExpression with TimeZoneAwareExpression with ImplicitCastInputTypes {
+  extends BinaryExpression with TimeZoneAwareExpression with ExpectsInputTypes {
 
   def this(start: Expression, interval: Expression) = this(start, interval, None)
 
@@ -2127,7 +2129,7 @@ case class DatePart(field: Expression, source: Expression, child: Expression)
  * between the given timestamps.
 */
 case class SubtractTimestamps(endTimestamp: Expression, startTimestamp: Expression)
-  extends BinaryExpression with ImplicitCastInputTypes {
+  extends BinaryExpression with ExpectsInputTypes {
 
   override def left: Expression = endTimestamp
   override def right: Expression = startTimestamp
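
A note on the `nullSafeEval` change above: because `inputTypes` now also admits ShortType and ByteType, the boxed second argument may be a `java.lang.Byte` or `java.lang.Short`, which a direct `asInstanceOf[Int]` rejects at runtime; converting through `java.lang.Number` handles all three. A minimal standalone sketch (illustrative values, not taken from the patch):

```scala
// With ByteType/ShortType accepted, the value handed to nullSafeEval can be a
// boxed java.lang.Byte or java.lang.Short rather than a java.lang.Integer.
val boxedDays: Any = 1.toByte
// boxedDays.asInstanceOf[Int]                        // would throw ClassCastException
val days = boxedDays.asInstanceOf[Number].intValue()  // Byte/Short/Int handled uniformly
println(days)                                         // prints 1
```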

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala

Lines changed: 0 additions & 59 deletions
@@ -1401,44 +1401,6 @@ class TypeCoercionSuite extends AnalysisTest {
     }
   }
 
-  test("rule for date/timestamp operations") {
-    val dateTimeOperations = TypeCoercion.DateTimeOperations
-    val date = Literal(new java.sql.Date(0L))
-    val timestamp = Literal(new Timestamp(0L))
-    val interval = Literal(new CalendarInterval(0, 0, 0))
-    val str = Literal("2015-01-01")
-    val intValue = Literal(0, IntegerType)
-
-    ruleTest(dateTimeOperations, Add(date, interval), Cast(TimeAdd(date, interval), DateType))
-    ruleTest(dateTimeOperations, Add(interval, date), Cast(TimeAdd(date, interval), DateType))
-    ruleTest(dateTimeOperations, Add(timestamp, interval),
-      Cast(TimeAdd(timestamp, interval), TimestampType))
-    ruleTest(dateTimeOperations, Add(interval, timestamp),
-      Cast(TimeAdd(timestamp, interval), TimestampType))
-    ruleTest(dateTimeOperations, Add(str, interval), Cast(TimeAdd(str, interval), StringType))
-    ruleTest(dateTimeOperations, Add(interval, str), Cast(TimeAdd(str, interval), StringType))
-
-    ruleTest(dateTimeOperations, Subtract(date, interval), Cast(TimeSub(date, interval), DateType))
-    ruleTest(dateTimeOperations, Subtract(timestamp, interval),
-      Cast(TimeSub(timestamp, interval), TimestampType))
-    ruleTest(dateTimeOperations, Subtract(str, interval), Cast(TimeSub(str, interval), StringType))
-
-    // interval operations should not be effected
-    ruleTest(dateTimeOperations, Add(interval, interval), Add(interval, interval))
-    ruleTest(dateTimeOperations, Subtract(interval, interval), Subtract(interval, interval))
-
-    ruleTest(dateTimeOperations, Add(date, intValue), DateAdd(date, intValue))
-    ruleTest(dateTimeOperations, Add(intValue, date), DateAdd(date, intValue))
-    ruleTest(dateTimeOperations, Subtract(date, intValue), DateSub(date, intValue))
-    ruleTest(dateTimeOperations, Subtract(date, date), SubtractDates(date, date))
-    ruleTest(dateTimeOperations, Subtract(timestamp, timestamp),
-      SubtractTimestamps(timestamp, timestamp))
-    ruleTest(dateTimeOperations, Subtract(timestamp, date),
-      SubtractTimestamps(timestamp, Cast(date, TimestampType)))
-    ruleTest(dateTimeOperations, Subtract(date, timestamp),
-      SubtractTimestamps(Cast(date, TimestampType), timestamp))
-  }
-
   /**
    * There are rules that need to not fire before child expressions get resolved.
    * We use this test to make sure those rules do not fire early.
@@ -1586,27 +1548,6 @@ class TypeCoercionSuite extends AnalysisTest {
       Multiply(CaseWhen(Seq((EqualTo(1, 2), Cast(1, DecimalType(34, 24)))),
         Cast(100, DecimalType(34, 24))), Cast(1, IntegerType)))
   }
-
-  test("rule for interval operations") {
-    val dateTimeOperations = TypeCoercion.DateTimeOperations
-    val interval = Literal(new CalendarInterval(0, 0, 0))
-
-    Seq(
-      Literal(10.toByte, ByteType),
-      Literal(10.toShort, ShortType),
-      Literal(10, IntegerType),
-      Literal(10L, LongType),
-      Literal(Decimal(10), DecimalType.SYSTEM_DEFAULT),
-      Literal(10.5.toFloat, FloatType),
-      Literal(10.5, DoubleType)).foreach { num =>
-      ruleTest(dateTimeOperations, Multiply(interval, num),
-        MultiplyInterval(interval, num))
-      ruleTest(dateTimeOperations, Multiply(num, interval),
-        MultiplyInterval(interval, num))
-      ruleTest(dateTimeOperations, Divide(interval, num),
-        DivideInterval(interval, num))
-    }
-  }
 }

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala

Lines changed: 16 additions & 0 deletions
@@ -332,6 +332,12 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
   }
 
   test("date_add") {
+    checkEvaluation(
+      DateAdd(Literal(Date.valueOf("2016-02-28")), Literal(1.toByte)),
+      DateTimeUtils.fromJavaDate(Date.valueOf("2016-02-29")))
+    checkEvaluation(
+      DateAdd(Literal(Date.valueOf("2016-02-28")), Literal(1.toShort)),
+      DateTimeUtils.fromJavaDate(Date.valueOf("2016-02-29")))
     checkEvaluation(
       DateAdd(Literal(Date.valueOf("2016-02-28")), Literal(1)),
       DateTimeUtils.fromJavaDate(Date.valueOf("2016-02-29")))
@@ -347,10 +353,18 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       DateAdd(Literal(Date.valueOf("2016-02-28")), positiveIntLit), 49627)
     checkEvaluation(
       DateAdd(Literal(Date.valueOf("2016-02-28")), negativeIntLit), -15910)
+    checkConsistencyBetweenInterpretedAndCodegen(DateAdd, DateType, ByteType)
+    checkConsistencyBetweenInterpretedAndCodegen(DateAdd, DateType, ShortType)
     checkConsistencyBetweenInterpretedAndCodegen(DateAdd, DateType, IntegerType)
   }
 
   test("date_sub") {
+    checkEvaluation(
+      DateSub(Literal(Date.valueOf("2015-01-01")), Literal(1.toByte)),
+      DateTimeUtils.fromJavaDate(Date.valueOf("2014-12-31")))
+    checkEvaluation(
+      DateSub(Literal(Date.valueOf("2015-01-01")), Literal(1.toShort)),
+      DateTimeUtils.fromJavaDate(Date.valueOf("2014-12-31")))
     checkEvaluation(
       DateSub(Literal(Date.valueOf("2015-01-01")), Literal(1)),
       DateTimeUtils.fromJavaDate(Date.valueOf("2014-12-31")))
@@ -366,6 +380,8 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       DateSub(Literal(Date.valueOf("2016-02-28")), positiveIntLit), -15909)
     checkEvaluation(
       DateSub(Literal(Date.valueOf("2016-02-28")), negativeIntLit), 49628)
+    checkConsistencyBetweenInterpretedAndCodegen(DateSub, DateType, ByteType)
+    checkConsistencyBetweenInterpretedAndCodegen(DateSub, DateType, ShortType)
     checkConsistencyBetweenInterpretedAndCodegen(DateSub, DateType, IntegerType)
   }

sql/core/src/test/resources/sql-tests/inputs/datetime.sql

Lines changed: 42 additions & 6 deletions
@@ -30,12 +30,48 @@ select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'), week
 
 select year('1500-01-01'), month('1500-01-01'), dayOfYear('1500-01-01');
 
-select date '2001-09-28' + 7;
-select 7 + date '2001-09-28';
-select date '2001-10-01' - 7;
-select date '2001-10-01' - date '2001-09-28';
-select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678';
-select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01';
 
 select date '2019-01-01\t';
 select timestamp '2019-01-01\t';
+
+-- time add/sub
+select timestamp'2011-11-11 11:11:11' + interval '2' day;
+select timestamp'2011-11-11 11:11:11' - interval '2' day;
+select date'2011-11-11 11:11:11' + interval '2' second;
+select date'2011-11-11 11:11:11' - interval '2' second;
+select '2011-11-11' - interval '2' day;
+select '2011-11-11 11:11:11' - interval '2' second;
+select '1' - interval '2' second;
+select 1 - interval '2' second;
+
+-- subtract timestamps
+select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678';
+select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01';
+select timestamp'2019-10-06 10:11:12.345678' - null;
+select null - timestamp'2019-10-06 10:11:12.345678';
+
+-- date add/sub
+select date_add('2011-11-11', 1Y);
+select date_add('2011-11-11', 1S);
+select date_add('2011-11-11', 1);
+select date_add('2011-11-11', 1L);
+select date_add('2011-11-11', 1.0);
+select date_add('2011-11-11', 1E1);
+select date_add('2011-11-11', '1');
+select date_add(date'2011-11-11', 1);
+select date_add(timestamp'2011-11-11', 1);
+select date_sub(date'2011-11-11', 1);
+select date_sub(timestamp'2011-11-11', 1);
+select date_sub(null, 1);
+select date_sub(date'2011-11-11', null);
+select date'2011-11-11' + 1E1;
+select null + date '2001-09-28';
+select date '2001-09-28' + 7Y;
+select 7S + date '2001-09-28';
+select date '2001-10-01' - 7;
+select date '2001-09-28' + null;
+select date '2001-09-28' - null;
+
+-- subtract dates
+select null - date '2019-10-06';
+select date '2001-10-01' - date '2001-09-28';
