@@ -32,12 +32,12 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
   "Measure" should "be based on the dataframe" in {
 
     // Measures
-    val measureIds: AtumMeasure = RecordCount()
-    val salaryAbsSum: AtumMeasure = AbsSumOfValuesOfColumn(
-      measuredCol = "salary"
-    )
-    val salarySum = SumOfValuesOfColumn(measuredCol = "salary")
-    val sumOfHashes: AtumMeasure = SumOfHashesOfColumn(measuredCol = "id")
+    val measureIds: AtumMeasure = RecordCount()
+    val salaryAbsSum: AtumMeasure = AbsSumOfValuesOfColumn(measuredCol = "salary")
+    val salarySum = SumOfValuesOfColumn(measuredCol = "salary")
+    val salaryTruncSum = SumOfTruncatedValuesOfColumn(measuredCol = "salary")
+    val salaryAbsTruncSum = AbsSumOfTruncatedValuesOfColumn(measuredCol = "salary")
+    val sumOfHashes: AtumMeasure = SumOfHashesOfColumn(measuredCol = "id")
 
     // AtumContext contains `Measurement`
     val atumContextInstanceWithRecordCount = AtumAgent
@@ -86,12 +86,34 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
         .removeMeasure(salaryAbsSum)
     )
 
+    val dfExtraPersonWithDecimalSalary = spark
+      .createDataFrame(
+        Seq(
+          ("id", "firstName", "lastName", "email", "email2", "profession", "3000.98"),
+          ("id", "firstName", "lastName", "email", "email2", "profession", "-1000.76")
+        )
+      )
+      .toDF("id", "firstName", "lastName", "email", "email2", "profession", "salary")
+
+    val dfExtraDecimalPerson = dfExtraPersonWithDecimalSalary.union(dfPersons)
+
+    dfExtraDecimalPerson.createCheckpoint("a checkpoint name")(
+      atumContextWithSalaryAbsMeasure
+        .removeMeasure(measureIds)
+        .removeMeasure(salaryAbsSum)
+    )
+
+
     val dfPersonCntResult = measureIds.function(dfPersons)
     val dfFullCntResult = measureIds.function(dfFull)
     val dfFullSalaryAbsSumResult = salaryAbsSum.function(dfFull)
     val dfFullHashResult = sumOfHashes.function(dfFull)
     val dfExtraPersonSalarySumResult = salarySum.function(dfExtraPerson)
     val dfFullSalarySumResult = salarySum.function(dfFull)
+    val dfExtraPersonSalarySumTruncResult = salaryTruncSum.function(dfExtraDecimalPerson)
+    val dfFullSalarySumTruncResult = salaryTruncSum.function(dfFull)
+    val dfExtraPersonSalaryAbsSumTruncResult = salaryAbsTruncSum.function(dfExtraDecimalPerson)
+    val dfFullSalaryAbsSumTruncResult = salaryAbsTruncSum.function(dfFull)
 
     // Assertions
     assert(dfPersonCntResult.resultValue == "1000")
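A quick sanity check on the expected values asserted in the next hunk. The two extra rows carry salaries of 3000.98 and -1000.76, and the assertions imply truncation toward zero (a sketch using BigDecimal.toLong for illustration; Atum's actual implementation may differ):

    val extras = Seq(BigDecimal("3000.98"), BigDecimal("-1000.76"))
    extras.map(_.toLong).sum     // 3000 + (-1000) = 2000
    extras.map(_.abs.toLong).sum // 3000 + 1000 = 4000

With a base salary sum of 2987144 (the value asserted for dfFull), this gives 2987144 + 2000 = 2989144 for the truncated sum and 2987144 + 4000 = 2991144 for the absolute truncated sum over dfExtraDecimalPerson.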
@@ -106,6 +128,14 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
     assert(dfExtraPersonSalarySumResult.resultValueType == ResultValueType.BigDecimalValue)
     assert(dfFullSalarySumResult.resultValue == "2987144")
     assert(dfFullSalarySumResult.resultValueType == ResultValueType.BigDecimalValue)
+    assert(dfExtraPersonSalarySumTruncResult.resultValue == "2989144")
+    assert(dfExtraPersonSalarySumTruncResult.resultValueType == ResultValueType.LongValue)
+    assert(dfFullSalarySumTruncResult.resultValue == "2987144")
+    assert(dfFullSalarySumTruncResult.resultValueType == ResultValueType.LongValue)
+    assert(dfExtraPersonSalaryAbsSumTruncResult.resultValue == "2991144")
+    assert(dfExtraPersonSalaryAbsSumTruncResult.resultValueType == ResultValueType.LongValue)
+    assert(dfFullSalaryAbsSumTruncResult.resultValue == "2987144")
+    assert(dfFullSalaryAbsSumTruncResult.resultValueType == ResultValueType.LongValue)
   }
 
   "AbsSumOfValuesOfColumn" should "return expected value" in {
@@ -187,4 +217,33 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
     assert(result.resultValueType == ResultValueType.BigDecimalValue)
   }
 
+  "SumOfTruncatedValuesOfColumn" should "return expected value" in {
+    val truncSum = SumOfTruncatedValuesOfColumn("colA")
+
+    val data = List(Row("1.98", "b1"), Row("-1.76", "b2"), Row("1.54", "b2"), Row("1.32", "b2"))
+    val rdd = spark.sparkContext.parallelize(data)
+
+    val schema = StructType(Array(StructField("colA", StringType), StructField("colB", StringType)))
+    val df = spark.createDataFrame(rdd, schema)
+
+    val result = truncSum.function(df)
+
+    assert(result.resultValue == "2")
+    assert(result.resultValueType == ResultValueType.LongValue)
+  }
+
+  "AbsSumOfTruncatedValuesOfColumn" should "return expected value" in {
+    val absTruncSum = AbsSumOfTruncatedValuesOfColumn("colA")
+
+    val data = List(Row("1.98", "b1"), Row("-1.76", "b2"), Row("1.54", "b2"), Row("-1.32", "b2"))
+    val rdd = spark.sparkContext.parallelize(data)
+
+    val schema = StructType(Array(StructField("colA", StringType), StructField("colB", StringType)))
+    val df = spark.createDataFrame(rdd, schema)
+
+    val result = absTruncSum.function(df)
+
+    assert(result.resultValue == "4")
+    assert(result.resultValueType == ResultValueType.LongValue)
+  }
 }
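For readers new to the truncated measures, here is a minimal plain-Spark sketch of the computation they appear to perform (hypothetical helpers, not Atum's actual implementation; assumes the string column is cast to decimal and truncated toward zero, which reproduces the expected values in the two tests above):

    import org.apache.spark.sql.DataFrame
    import org.apache.spark.sql.functions.{abs, col, sum}

    // Truncate toward zero, then sum: 1.98, -1.76, 1.54, 1.32 -> 1 - 1 + 1 + 1 = 2
    def sumOfTruncated(df: DataFrame, c: String): Long =
      df.select(sum(col(c).cast("decimal(38,18)").cast("long"))).first().getLong(0)

    // Take absolute values, truncate, then sum: 1.98, 1.76, 1.54, 1.32 -> 4
    def absSumOfTruncated(df: DataFrame, c: String): Long =
      df.select(sum(abs(col(c).cast("decimal(38,18)")).cast("long"))).first().getLong(0)

Truncating before or after taking the absolute value gives the same result on this data, so the tests do not distinguish the two orderings.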