@@ -22,12 +22,13 @@ package org.apache.spark.sql.benchmark
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{withDefaultTimeZone, LA}
 import org.apache.spark.sql.internal.SQLConf

+// spotless:off
 /**
  * Benchmark to measure Comet execution performance. To run this benchmark:
- * `SPARK_GENERATE_BENCHMARK_FILES=1 make
- * benchmark-org.apache.spark.sql.benchmark.CometDatetimeExpressionBenchmark` Results will be
- * written to "spark/benchmarks/CometDatetimeExpressionBenchmark-**results.txt".
+ * `SPARK_GENERATE_BENCHMARK_FILES=1 make benchmark-org.apache.spark.sql.benchmark.CometDatetimeExpressionBenchmark`
+ * Results will be written to "spark/benchmarks/CometDatetimeExpressionBenchmark-**results.txt".
  */
+// spotless:on
 object CometDatetimeExpressionBenchmark extends CometBenchmarkBase {

   def dateTruncExprBenchmark(values: Int, useDictionary: Boolean): Unit = {
@@ -76,29 +77,27 @@ object CometDatetimeExpressionBenchmark extends CometBenchmarkBase {
     }
   }

-  def unixTimestampBenchmark(values: Int, useDictionary: Boolean): Unit = {
+  def unixTimestampBenchmark(values: Int): Unit = {
     withTempPath { dir =>
       withTempTable("parquetV1Table") {
         prepareTable(
           dir,
           spark.sql(s"select timestamp_micros(cast(value/100000 as integer)) as ts FROM $tbl"))
-        val isDictionary = if (useDictionary) " (Dictionary)" else ""
-        runWithComet(s"Unix Timestamp from Timestamp$isDictionary", values) {
+        runWithComet("Unix Timestamp from Timestamp", values) {
           spark.sql("select unix_timestamp(ts) from parquetV1Table").noop()
         }
       }
     }
   }

-  def unixTimestampFromDateBenchmark(values: Int, useDictionary: Boolean): Unit = {
+  def unixTimestampFromDateBenchmark(values: Int): Unit = {
     withTempPath { dir =>
       withTempTable("parquetV1Table") {
         prepareTable(
           dir,
           spark.sql(
             s"select cast(timestamp_micros(cast(value/100000 as integer)) as date) as dt FROM $tbl"))
-        val isDictionary = if (useDictionary) " (Dictionary)" else ""
-        runWithComet(s"Unix Timestamp from Date$isDictionary", values) {
+        runWithComet("Unix Timestamp from Date", values) {
           spark.sql("select unix_timestamp(dt) from parquetV1Table").noop()
         }
       }
@@ -125,20 +124,11 @@ object CometDatetimeExpressionBenchmark extends CometBenchmarkBase {
     runBenchmarkWithTable("TimestampTrunc (Dictionary)", values, useDictionary = true) { v =>
       timestampTruncExprBenchmark(v, useDictionary = true)
     }
-    runBenchmarkWithTable("UnixTimestamp", values) { v =>
-      unixTimestampBenchmark(v, useDictionary = false)
+    runBenchmarkWithTable("UnixTimestamp(timestamp)", values) { v =>
+      unixTimestampBenchmark(v)
     }
-    runBenchmarkWithTable("UnixTimestamp (Dictionary)", values, useDictionary = true) { v =>
-      unixTimestampBenchmark(v, useDictionary = true)
-    }
-    runBenchmarkWithTable("UnixTimestamp from Date", values) { v =>
-      unixTimestampFromDateBenchmark(v, useDictionary = false)
-    }
-    runBenchmarkWithTable(
-      "UnixTimestamp from Date (Dictionary)",
-      values,
-      useDictionary = true) { v =>
-      unixTimestampFromDateBenchmark(v, useDictionary = true)
+    runBenchmarkWithTable("UnixTimestamp(date)", values) { v =>
+      unixTimestampFromDateBenchmark(v)
     }
   }
 }
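
Usage note (consistent with the scaladoc above): the benchmark is still generated the same way, via

  SPARK_GENERATE_BENCHMARK_FILES=1 make benchmark-org.apache.spark.sql.benchmark.CometDatetimeExpressionBenchmark

with results written to "spark/benchmarks/CometDatetimeExpressionBenchmark-**results.txt". Since the dictionary variants of the two UnixTimestamp benchmarks are removed, the results file should now contain only the "UnixTimestamp(timestamp)" and "UnixTimestamp(date)" groups, assuming runBenchmarkWithTable uses its first argument as the benchmark group name.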