Skip to content

Commit c58a7f7

Browse files
Merge pull request #31 from mitchelllisle/copilot/fix-fdd27117-d65f-4381-9367-15645fb23502
Convert MerkleTree timestamp from Long to java.time.Instant for better type safety
2 parents bdded92 + 348b282 commit c58a7f7

File tree

6 files changed

+34
-22
lines changed

6 files changed

+34
-22
lines changed

.github/workflows/build-test.yaml

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -30,7 +30,7 @@ jobs:
3030
sudo apt-get install -y sbt
3131
3232
- name: Run tests
33-
run: sbt -J--add-opens=java.base/sun.nio.ch=ALL-UNNAMED clean test
33+
run: make test
3434

3535
- name: Upload coverage reports to Codecov
3636
uses: codecov/codecov-action@v3

Makefile

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -13,5 +13,5 @@ format:
1313
sbt scalafmt
1414

1515
test:
16-
sbt -J--add-opens=java.base/sun.nio.ch=ALL-UNNAMED test
16+
sbt -J--add-opens=java.base/sun.nio.ch=ALL-UNNAMED coverage test coverageReport
1717
@make clean

build.sbt

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -1,4 +1,4 @@
1-
ThisBuild / version := "1.5.0"
1+
ThisBuild / version := "1.6.0"
22

33
ThisBuild / scalaVersion := "2.12.20"
44

@@ -24,7 +24,7 @@ libraryDependencies ++= Seq(
2424
"io.circe" %% "circe-yaml" % "1.15.0",
2525
"io.circe" %% "circe-core" % circeVersion,
2626
"io.circe" %% "circe-parser" % circeVersion,
27-
"com.swoop" %% "spark-alchemy" % "1.2.1"
27+
"com.swoop" %% "spark-alchemy" % "1.2.1",
2828
)
2929

3030
dependencyOverrides += "org.scala-lang.modules" %% "scala-xml" % "2.4.0"

project/plugins.sbt

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1 @@
1+
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.3.1")

src/main/scala/org/mitchelllisle/analysers/MerkleTree.scala

Lines changed: 16 additions & 17 deletions
Original file line number · Diff line number · Diff line change
@@ -1,29 +1,28 @@
11
package org.mitchelllisle.analysers
22

33
import org.apache.spark.sql.{DataFrame, functions => F}
4-
import org.apache.spark.sql.types._
54
import java.security.MessageDigest
5+
import java.time.Instant
66
import scala.annotation.tailrec
77

8+
case class MerkleProof(
9+
rootHash: String,
10+
recordCount: Long,
11+
leafHashes: Seq[String],
12+
timestamp: Instant = Instant.now()
13+
)
14+
15+
case class DeletionProof(
16+
beforeProof: MerkleProof,
17+
afterProof: MerkleProof,
18+
deletedRecordHashes: Seq[String],
19+
merklePathProofs: Seq[String]
20+
)
21+
822
/** MerkleTreeAnalyser provides cryptographic proof capabilities for data retention and deletion verification.
923
* This complements the KHyperLogLogAnalyser by adding tamper-evident audit trails.
1024
*/
1125
object MerkleTree {
12-
13-
case class MerkleProof(
14-
rootHash: String,
15-
recordCount: Long,
16-
leafHashes: Seq[String],
17-
timestamp: Long = System.currentTimeMillis()
18-
)
19-
20-
case class DeletionProof(
21-
beforeProof: MerkleProof,
22-
afterProof: MerkleProof,
23-
deletedRecordHashes: Seq[String],
24-
merklePathProofs: Seq[String]
25-
)
26-
2726
/** Main entry point for creating a Merkle proof. This is the standard way to use MerkleTree.
2827
*
2928
* @param data The DataFrame to create proof for
@@ -98,7 +97,7 @@ object MerkleTree {
9897
rootHash = rootHash,
9998
recordCount = recordCount,
10099
leafHashes = leafHashes,
101-
timestamp = System.currentTimeMillis()
100+
timestamp = Instant.now()
102101
)
103102
}
104103

src/test/scala/MerkleTreeTest.scala

Lines changed: 13 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -1,6 +1,7 @@
11
import org.apache.spark.sql.DataFrame
22
import org.mitchelllisle.analysers.MerkleTree
33
import org.scalatest.flatspec.AnyFlatSpec
4+
import java.time.Instant
45

56
class MerkleTreeTest extends AnyFlatSpec with SparkFunSuite {
67
import spark.implicits._
@@ -21,7 +22,8 @@ class MerkleTreeTest extends AnyFlatSpec with SparkFunSuite {
2122
assert(proof.rootHash.nonEmpty)
2223
assert(proof.recordCount == 4)
2324
assert(proof.leafHashes.length == 4)
24-
assert(proof.timestamp > 0)
25+
assert(proof.timestamp.isInstanceOf[Instant])
26+
assert(proof.timestamp.isBefore(Instant.now().plusSeconds(1)))
2527
}
2628

2729
"apply" should "produce same result as createMerkleProof" in {
@@ -183,4 +185,14 @@ class MerkleTreeTest extends AnyFlatSpec with SparkFunSuite {
183185

184186
assert(hashes1.sameElements(hashes2))
185187
}
188+
189+
"timestamp" should "use proper Instant type and be recent" in {
190+
val beforeTime = Instant.now().minusSeconds(1)
191+
val proof = MerkleTree.createMerkleProof(testData, columns, idColumn)
192+
val afterTime = Instant.now().plusSeconds(1)
193+
194+
assert(proof.timestamp.isInstanceOf[Instant])
195+
assert(proof.timestamp.isAfter(beforeTime))
196+
assert(proof.timestamp.isBefore(afterTime))
197+
}
186198
}

0 commit comments

Comments (0)