@@ -8,20 +8,19 @@ block = [
]

block-header = [
chain-id: ULID,
chain-id: UUID, ; UUID v7
height: int,
timestamp: #6.1(uint .ge 1722470400), ; Epoch-based date/time
prev-block-id: hash-bytes, ; hash of the previous block
?ledger-type: UUID,
?purpose-id: ULID / UUID,
?ledger-type: UUID, ; UUID v4
?purpose-id: UUID, ; UUID v7
?validator,
~metadata,
]

block-data = encoded-cbor

UUID = #6.37(bytes) ; UUID type
ULID = #6.32780(bytes) ; ULID type

kid = hash-bytes ; hash of the x509/c509 certificate
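
As an illustration of the changed rule, a minimal sketch (the helper name `encode_chain_id` is hypothetical; it only uses the `minicbor` and `uuid` crates that the Rust code further down in this PR already depends on) of how a `chain-id` matching `UUID = #6.37(bytes)` would be written: CBOR tag 37 wrapping the 16 raw bytes of a UUID v7.

```rust
use minicbor::Encoder;
use uuid::Uuid;

/// Hypothetical helper: encode a UUID v7 chain-id as `#6.37(bytes)`,
/// i.e. CBOR tag 37 followed by the 16 raw UUID bytes.
fn encode_chain_id(chain_id: &Uuid) -> anyhow::Result<Vec<u8>> {
    let mut buffer = Vec::new();
    let mut encoder = Encoder::new(&mut buffer);
    encoder.tag(minicbor::data::Tag::new(37))?; // UUID CBOR tag
    encoder.bytes(chain_id.as_bytes())?;        // 16-byte UUID payload
    Ok(buffer)
}
```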

@@ -2,15 +2,14 @@
; https://datatracker.ietf.org/doc/html/draft-ietf-cbor-cde-06

genesis-to-prev-hash = [
chain-id: ULID,
chain-id: UUID, ; UUID v7
timestamp: #6.1(uint .ge 1722470400), ; Epoch-based date/time
ledger-type: UUID,
purpose-id: ULID / UUID,
ledger-type: UUID, ; UUID v4
purpose-id: UUID, ; UUID v7
validator,
]

UUID = #6.37(bytes) ; UUID type
ULID = #6.32780(bytes) ; ULID type

validator = (kid / [2* kid])
kid = hash-bytes ; hash of the x509/c509 certificate
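
Since `prev-block-id` is defined above as the hash of the previous block, the genesis block's previous id is presumably derived by hashing this CBOR-encoded `genesis-to-prev-hash` array. A minimal sketch under that assumption (the helper name and the 32-byte Blake2b digest length are illustrative, not taken from the spec):

```rust
use blake2b_simd::Params;

/// Hypothetical helper: derive the genesis block's `prev-block-id` by
/// hashing the CBOR-encoded `genesis-to-prev-hash` array with Blake2b.
fn genesis_prev_block_id(encoded_genesis: &[u8]) -> Vec<u8> {
    Params::new()
        .hash_length(32) // assumed digest length; the spec only says `hash-bytes`
        .hash(encoded_genesis)
        .as_bytes()
        .to_vec()
}
```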
rust/immutable-ledger/Cargo.toml (3 changes: 1 addition & 2 deletions)
@@ -11,8 +11,7 @@ license.workspace = true
ed25519-dalek = "2.1.1"
anyhow = "1.0.86"
minicbor = { version = "0.24", features = ["std"] }
uuid = { version = "1.10.0", features = ["v4", "serde"] }
ulid = { version = "1.1.3", features = ["serde", "uuid"] }
uuid = { version = "1.10.0", features = ["v4", "v7", "serde"] }
hex = "0.4.3"
blake2b_simd = "1.0.2"
blake3 = "=0.1.3"
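
The added `v7` feature is what enables the time-ordered identifiers that replace the former ULIDs, while `v4` stays for `ledger-type`. A quick, illustrative sketch of the difference:

```rust
use uuid::Uuid;

fn main() {
    // Time-ordered identifiers (replacing the former ULIDs).
    let chain_id = Uuid::now_v7();
    let purpose_id = Uuid::now_v7();
    // Random identifier, unchanged by this PR.
    let ledger_type = Uuid::new_v4();
    println!("{chain_id} {purpose_id} {ledger_type}");
}
```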
rust/immutable-ledger/src/serialize.rs (55 changes: 25 additions & 30 deletions)
@@ -4,7 +4,6 @@

use anyhow::{bail, Ok};
use blake2b_simd::{self, Params};
use ulid::Ulid;
use uuid::Uuid;

/// Genesis block MUST have a height of 0.
@@ -70,9 +69,6 @@ const TIMESTAMP_CBOR_TAG: u64 = 1;
/// CBOR tag for UUID
const UUID_CBOR_TAG: u64 = 37;

/// CBOR tag for UUID
const ULID_CBOR_TAG: u64 = 32780;

/// CBOR tags for BLAKE2 and BLAKE3 hash functions
/// `https://github.com/input-output-hk/catalyst-voices/blob/main/docs/src/catalyst-standards/cbor_tags/blake.md`

@@ -292,7 +288,7 @@ impl Block {
#[derive(Debug, Clone, PartialEq)]
pub struct BlockHeader {
/// Unique identifier of the chain.
pub chain_id: Ulid,
pub chain_id: Uuid,
/// Block height.
pub height: i64,
/// Block epoch-based date/time.
@@ -305,7 +301,7 @@ pub struct BlockHeader {
pub ledger_type: Uuid,
/// Unique identifier of the purpose. Each ledger instance will have strict time
/// boundaries, so each of them will run for a different purpose.
pub purpose_id: Ulid,
pub purpose_id: Uuid,
/// Identifier or identifiers of the entity that produced and processed the block.
pub validator: Vec<Kid>,
/// Add arbitrary metadata to the block.
@@ -317,8 +313,8 @@ impl BlockHeader {
#[must_use]
#[allow(clippy::too_many_arguments)]
pub fn new(
chain_id: Ulid, height: i64, block_time_stamp: i64,
previous_block_hash: (HashFunction, Vec<u8>), ledger_type: Uuid, purpose_id: Ulid,
chain_id: Uuid, height: i64, block_time_stamp: i64,
previous_block_hash: (HashFunction, Vec<u8>), ledger_type: Uuid, purpose_id: Uuid,
validator: Vec<Kid>, metadata: Vec<u8>,
) -> Self {
Self {
@@ -347,8 +343,8 @@ impl BlockHeader {
encoder.array(BLOCK_HEADER_SIZE)?;

// Chain id
encoder.tag(minicbor::data::Tag::new(ULID_CBOR_TAG))?;
encoder.bytes(&self.chain_id.to_bytes())?;
encoder.tag(minicbor::data::Tag::new(UUID_CBOR_TAG))?;
encoder.bytes(self.chain_id.as_bytes())?;

// Block height
encoder.int(self.height.into())?;
@@ -372,8 +368,8 @@ impl BlockHeader {
encoder.bytes(self.ledger_type.as_bytes())?;

// Purpose id
encoder.tag(minicbor::data::Tag::new(ULID_CBOR_TAG))?;
encoder.bytes(&self.purpose_id.to_bytes())?;
encoder.tag(minicbor::data::Tag::new(UUID_CBOR_TAG))?;
encoder.bytes(self.purpose_id.as_bytes())?;

// Validators
encoder.array(self.validator.len().try_into()?)?;
@@ -405,7 +401,7 @@ impl BlockHeader {

// Raw chain_id
cbor_decoder.tag()?;
let chain_id = Ulid::from_bytes(
let chain_id = Uuid::from_bytes(
cbor_decoder
.bytes()
.map_err(|e| anyhow::anyhow!(format!("Invalid cbor for chain id : {e}")))?
@@ -443,7 +439,7 @@ impl BlockHeader {

// Raw purpose id
cbor_decoder.tag()?;
let purpose_id = Ulid::from_bytes(
let purpose_id = Uuid::from_bytes(
cbor_decoder
.bytes()
.map_err(|e| anyhow::anyhow!(format!("Invalid cbor for purpose id : {e}")))?
@@ -489,7 +485,7 @@ impl BlockHeader {
/// Genesis block previous identifier type, i.e. the hash of itself.
pub struct GenesisPreviousHash {
/// Unique identifier of the chain.
pub chain_id: Ulid,
pub chain_id: Uuid,
/// Block epoch-based date/time.
pub block_time_stamp: i64,
/// Unique identifier of the ledger type.
@@ -498,7 +494,7 @@ pub struct GenesisPreviousHash {
pub ledger_type: Uuid,
/// Unique identifier of the purpose. Each ledger instance will have strict time
/// boundaries, so each of them will run for a different purpose.
pub purpose_id: Ulid,
pub purpose_id: Uuid,
/// Identifier or identifiers of the entity that produced and processed the block.
pub validator: Vec<Kid>,
}
@@ -507,7 +503,7 @@ impl GenesisPreviousHash {
/// Create previous block id
#[must_use]
pub fn new(
chain_id: Ulid, block_time_stamp: i64, ledger_type: Uuid, purpose_id: Ulid,
chain_id: Uuid, block_time_stamp: i64, ledger_type: Uuid, purpose_id: Uuid,
validator: Vec<Kid>,
) -> Self {
Self {
@@ -532,8 +528,8 @@ impl GenesisPreviousHash {
encoder.array(GENESIS_TO_PREV_HASH_SIZE)?;

// Chain id
encoder.tag(minicbor::data::Tag::new(ULID_CBOR_TAG))?;
encoder.bytes(&self.chain_id.to_bytes())?;
encoder.tag(minicbor::data::Tag::new(UUID_CBOR_TAG))?;
encoder.bytes(self.chain_id.as_bytes())?;

// Block timestamp
encoder.tag(minicbor::data::Tag::new(TIMESTAMP_CBOR_TAG))?;
@@ -549,8 +545,8 @@ impl GenesisPreviousHash {
encoder.bytes(self.ledger_type.as_bytes())?;

// Purpose id
encoder.tag(minicbor::data::Tag::new(ULID_CBOR_TAG))?;
encoder.bytes(&self.purpose_id.to_bytes())?;
encoder.tag(minicbor::data::Tag::new(UUID_CBOR_TAG))?;
encoder.bytes(self.purpose_id.as_bytes())?;

// Validators
encoder.array(self.validator.len().try_into()?)?;
@@ -586,7 +582,6 @@ mod tests {

use ed25519_dalek::{Signature, Signer, SigningKey, SECRET_KEY_LENGTH};
use test_strategy::proptest;
use ulid::Ulid;
use uuid::Uuid;

use super::{BlockHeader, Kid};
@@ -609,12 +604,12 @@ mod tests {
.unwrap();

let block_hdr = BlockHeader::new(
Ulid::new(),
Uuid::now_v7(),
block_height,
block_timestamp,
(Blake2b, prev_block_hash),
Uuid::new_v4(),
Ulid::new(),
Uuid::now_v7(),
vec![Kid(kid_a), Kid(kid_b)],
metadata,
);
@@ -660,12 +655,12 @@ mod tests {
.unwrap();

let block_hdr = BlockHeader::new(
Ulid::new(),
Uuid::now_v7(),
block_height,
block_timestamp,
(Blake2b, prev_block_hash),
Uuid::new_v4(),
Ulid::new(),
Uuid::now_v7(),
vec![Kid(kid_a), Kid(kid_b)],
metadata,
);
@@ -751,9 +746,9 @@ mod tests {
.try_into()
.unwrap();

let chain_id = Ulid::new();
let chain_id = Uuid::now_v7();
let ledger_type = Uuid::new_v4();
let purpose_id = Ulid::new();
let purpose_id = Uuid::now_v7();

let block_hdr = BlockHeader::new(
chain_id,
@@ -824,9 +819,9 @@ mod tests {
073, 197, 105, 123, 050, 105, 025, 112, 059, 172, 003, 028, 174, 127, 096,
];

let chain_id = Ulid::new();
let chain_id = Uuid::now_v7();
let ledger_type = Uuid::new_v4();
let purpose_id = Ulid::new();
let purpose_id = Uuid::now_v7();
let block_time_stamp = 1_728_474_515;

let kid_a: [u8; 16] = hex::decode("00112233445566778899aabbccddeeff")