@@ -37,6 +37,8 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
       measuredCol = "salary"
     )
     val salarySum = SumOfValuesOfColumn(measuredCol = "salary")
+    val salaryTruncSum = SumOfTruncatedValuesOfColumn(measuredCol = "salary")
+    val salaryAbsTruncSum = AbsSumOfTruncatedValuesOfColumn(measuredCol = "salary")
     val sumOfHashes: AtumMeasure = SumOfHashesOfColumn(measuredCol = "id")
 
     // AtumContext contains `Measurement`
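The two measures introduced above aren't explained anywhere in the diff itself, so here is a minimal sketch of the truncate-then-sum semantics the assertions further down rely on. `sumOfTruncated` and `absSumOfTruncated` are hypothetical helpers, not Atum's internals, and truncation toward zero (1.98 → 1, -1.76 → -1) is an assumption inferred from the expected values in the new tests:

```scala
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.{abs, col, sum}
import org.apache.spark.sql.types.{DoubleType, LongType}

// Hypothetical helpers mirroring the assumed semantics of the new measures.
// A DoubleType -> LongType cast in Spark truncates toward zero, dropping the
// fractional part of positive and negative values alike.
def sumOfTruncated(df: DataFrame, measuredCol: String): String =
  df.select(sum(col(measuredCol).cast(DoubleType).cast(LongType)))
    .head().get(0).toString // 1.98, -1.76, 1.54, 1.32 => 1 - 1 + 1 + 1 = "2"

def absSumOfTruncated(df: DataFrame, measuredCol: String): String =
  df.select(sum(abs(col(measuredCol).cast(DoubleType).cast(LongType))))
    .head().get(0).toString // 1.98, -1.76, 1.54, -1.32 => 1 + 1 + 1 + 1 = "4"
```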
@@ -86,12 +88,34 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
         .removeMeasure(salaryAbsSum)
     )
 
+    val dfExtraPersonWithDecimalSalary = spark
+      .createDataFrame(
+        Seq(
+          ("id", "firstName", "lastName", "email", "email2", "profession", "3000.98"),
+          ("id", "firstName", "lastName", "email", "email2", "profession", "-1000.76")
+        )
+      )
+      .toDF("id", "firstName", "lastName", "email", "email2", "profession", "salary")
+
+    val dfExtraDecimalPerson = dfExtraPersonWithDecimalSalary.union(dfPersons)
+
+    dfExtraDecimalPerson.createCheckpoint("a checkpoint name")(
+      atumContextWithSalaryAbsMeasure
+        .removeMeasure(measureIds)
+        .removeMeasure(salaryAbsSum)
+    )
+
+
     val dfPersonCntResult = measureIds.function(dfPersons)
     val dfFullCntResult = measureIds.function(dfFull)
     val dfFullSalaryAbsSumResult = salaryAbsSum.function(dfFull)
     val dfFullHashResult = sumOfHashes.function(dfFull)
     val dfExtraPersonSalarySumResult = salarySum.function(dfExtraPerson)
     val dfFullSalarySumResult = salarySum.function(dfFull)
+    val dfExtraPersonSalarySumTruncResult = salaryTruncSum.function(dfExtraDecimalPerson)
+    val dfFullSalarySumTruncResult = salaryTruncSum.function(dfFull)
+    val dfExtraPersonSalaryAbsSumTruncResult = salaryAbsTruncSum.function(dfExtraDecimalPerson)
+    val dfFullSalaryAbsSumTruncResult = salaryAbsTruncSum.function(dfFull)
 
     // Assertions
     assert(dfPersonCntResult.resultValue == "1000")
@@ -106,6 +130,14 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
     assert(dfExtraPersonSalarySumResult.resultValueType == ResultValueType.BigDecimalValue)
     assert(dfFullSalarySumResult.resultValue == "2987144")
     assert(dfFullSalarySumResult.resultValueType == ResultValueType.BigDecimalValue)
+    assert(dfExtraPersonSalarySumTruncResult.resultValue == "2989144")
+    assert(dfExtraPersonSalarySumTruncResult.resultValueType == ResultValueType.BigDecimalValue)
+    assert(dfFullSalarySumTruncResult.resultValue == "2987144")
+    assert(dfFullSalarySumTruncResult.resultValueType == ResultValueType.BigDecimalValue)
+    assert(dfExtraPersonSalaryAbsSumTruncResult.resultValue == "2991144")
+    assert(dfExtraPersonSalaryAbsSumTruncResult.resultValueType == ResultValueType.BigDecimalValue)
+    assert(dfFullSalaryAbsSumTruncResult.resultValue == "2987144")
+    assert(dfFullSalaryAbsSumTruncResult.resultValueType == ResultValueType.BigDecimalValue)
   }
 
   "AbsSumOfValuesOfColumn" should "return expected value" in {
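A worked check of the four new expected values, assuming (as the neighbouring `dfFullSalarySumResult` assertion suggests) that the integral salary sum contributed by `dfPersons` is 2987144. Note the two decimal rows truncate toward zero rather than flooring; floor(-1.76) would be -2 and the sums would not match:

```scala
// Illustrative arithmetic only; 2987144 is taken from the assertions above.
val base        = BigDecimal(2987144)
val truncSum    = base + 3000 - 1000 // trunc(3000.98) + trunc(-1000.76) => 2989144
val absTruncSum = base + 3000 + 1000 // |trunc(3000.98)| + |trunc(-1000.76)| => 2991144
assert(truncSum == BigDecimal(2989144) && absTruncSum == BigDecimal(2991144))
```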
@@ -187,4 +219,33 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
     assert(result.resultValueType == ResultValueType.BigDecimalValue)
   }
 
+  "SumOfTruncatedValuesOfColumn" should "return expected value" in {
+    val truncatedSum = SumOfTruncatedValuesOfColumn("colA")
+
+    val data = List(Row("1.98", "b1"), Row("-1.76", "b2"), Row("1.54", "b2"), Row("1.32", "b2"))
+    val rdd = spark.sparkContext.parallelize(data)
+
+    val schema = StructType(Array(StructField("colA", StringType), StructField("colB", StringType)))
+    val df = spark.createDataFrame(rdd, schema)
+
+    val result = truncatedSum.function(df)
+
+    assert(result.resultValue == "2")
+    assert(result.resultValueType == ResultValueType.BigDecimalValue)
+  }
+
+  "AbsSumOfTruncatedValuesOfColumn" should "return expected value" in {
+    val absTruncatedSum = AbsSumOfTruncatedValuesOfColumn("colA")
+
+    val data = List(Row("1.98", "b1"), Row("-1.76", "b2"), Row("1.54", "b2"), Row("-1.32", "b2"))
+    val rdd = spark.sparkContext.parallelize(data)
+
+    val schema = StructType(Array(StructField("colA", StringType), StructField("colB", StringType)))
+    val df = spark.createDataFrame(rdd, schema)
+
+    val result = absTruncatedSum.function(df)
+
+    assert(result.resultValue == "4")
+    assert(result.resultValueType == ResultValueType.BigDecimalValue)
+  }
 }
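To try the new measures outside the suite, the pattern from the tests above carries over directly. A sketch, assuming the test file's imports and an active `spark` session:

```scala
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{StringType, StructField, StructType}

val schema = StructType(Array(StructField("colA", StringType)))
val df = spark.createDataFrame(
  spark.sparkContext.parallelize(List(Row("1.98"), Row("-1.76"))),
  schema
)

val result = SumOfTruncatedValuesOfColumn("colA").function(df)
// trunc(1.98) + trunc(-1.76) = 1 - 1, so resultValue is expected to be "0"
println(result.resultValue)
```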