Skip to content
10 changes: 5 additions & 5 deletions clarity/src/vm/database/sqlite.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
use rusqlite::{params, Connection, OptionalExtension};
use stacks_common::types::chainstate::{BlockHeaderHash, StacksBlockId, TrieHash};
use stacks_common::types::sqlite::NO_PARAMS;
use stacks_common::util::db::tx_busy_handler;
use stacks_common::util::db::{tx_busy_handler, SqlEncoded};
use stacks_common::util::hash::Sha512Trunc256Sum;

use super::clarity_store::{make_contract_hash_key, ContractCommitment};
Expand Down Expand Up @@ -147,7 +147,7 @@ impl SqliteConnection {
value: &str,
) -> Result<()> {
let key = format!("clr-meta::{contract_hash}::{key}");
let params = params![bhh, key, value];
let params = params![bhh.sqlhex(), key, value];

if let Err(e) = conn.execute(
"INSERT INTO metadata_table (blockhash, key, value) VALUES (?, ?, ?)",
Expand All @@ -164,7 +164,7 @@ impl SqliteConnection {
from: &StacksBlockId,
to: &StacksBlockId,
) -> Result<()> {
let params = params![to, from];
let params = params![to.sqlhex(), from.sqlhex()];
if let Err(e) = conn.execute(
"UPDATE metadata_table SET blockhash = ? WHERE blockhash = ?",
params,
Expand All @@ -178,7 +178,7 @@ impl SqliteConnection {
pub fn drop_metadata(conn: &Connection, from: &StacksBlockId) -> Result<()> {
if let Err(e) = conn.execute(
"DELETE FROM metadata_table WHERE blockhash = ?",
params![from],
params![from.sqlhex()],
) {
error!("Failed to drop metadata from {from}: {e:?}");
return Err(InterpreterError::DBError(SQL_FAIL_MESSAGE.into()).into());
Expand All @@ -193,7 +193,7 @@ impl SqliteConnection {
key: &str,
) -> Result<Option<String>> {
let key = format!("clr-meta::{contract_hash}::{key}");
let params = params![bhh, key];
let params = params![bhh.sqlhex(), key];

match conn
.query_row(
Expand Down
3 changes: 2 additions & 1 deletion contrib/stacks-inspect/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ use stacks_common::types::chainstate::{
};
use stacks_common::types::net::{PeerAddress, PeerHost};
use stacks_common::types::sqlite::NO_PARAMS;
use stacks_common::util::db::SqlEncoded;
use stacks_common::util::hash::{Hash160, hex_bytes, to_hex};
use stacks_common::util::retry::LogReader;
use stacks_common::util::secp256k1::{Secp256k1PrivateKey, Secp256k1PublicKey};
Expand Down Expand Up @@ -942,7 +943,7 @@ check if the associated microblocks can be downloaded
println!("{cur_burn}, {cur_tip}");
let (next_burn, next_tip) = match
conn.query_row("SELECT parent_burn_header_hash, parent_anchored_block_hash FROM staging_blocks WHERE anchored_block_hash = ? and burn_header_hash = ?",
params![cur_tip, cur_burn], |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) {
params![cur_tip.sqlhex(), cur_burn.sqlhex()], |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) {
Ok(x) => x,
Err(e) => {
match e {
Expand Down
2 changes: 1 addition & 1 deletion stacks-common/src/types/chainstate.rs
Original file line number Diff line number Diff line change
Expand Up @@ -454,7 +454,7 @@ impl StacksMessageCodec for StacksWorkScore {

impl_byte_array_message_codec!(TrieHash, TRIEHASH_ENCODED_SIZE as u32);
impl_byte_array_message_codec!(Sha512Trunc256Sum, 32);

impl_byte_array_message_codec!(VRFSeed, 32);
impl_byte_array_message_codec!(ConsensusHash, 20);
impl_byte_array_message_codec!(Hash160, 20);
impl_byte_array_message_codec!(BurnchainHeaderHash, 32);
Expand Down
75 changes: 74 additions & 1 deletion stacks-common/src/util/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,14 +15,17 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.

use std::backtrace::Backtrace;
use std::convert::TryFrom;
use std::io::{Read, Write};
use std::sync::{LazyLock, Mutex};
use std::thread;
use std::time::Instant;

use hashbrown::HashMap;
use rand::{thread_rng, Rng};
use rusqlite::Connection;
use rusqlite::{Connection, Row};

use crate::codec::{read_next, write_next, Error as CodecError, StacksMessageCodec};
use crate::util::sleep_ms;

/// Keep track of DB locks, for deadlock debugging
Expand Down Expand Up @@ -91,3 +94,73 @@ pub fn tx_busy_handler(run_count: i32) -> bool {
sleep_ms(sleep_time_ms);
true
}

/// We use one of a few different encodings for columns that store "byte-string-y" data. That is, data that
/// is either a byte string, or data that is composed of many byte strings. At the time of this
/// writing, all byte-string-y data are stored as hex strings. As part of a system-wide migration
/// process, these fields will be moved over to a binary representation or a SIP-003 representation
/// to save disk space.
///
/// The first byte in a DB-stored byte-string-y column identifies which codec to use, as detailed
/// below. The absence of one of these bytes means to use the legacy codec (i.e. hex string or
/// JSON, depending on the struct). The byte values are not ASCII printable, which ensures that
/// their presence unambiguously identifies which codec to use.
#[repr(C)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ColumnEncoding {
    /// The following data is a SIP-003 blob.
    /// Tag byte 0x00 is not ASCII-printable, so it cannot be confused with the
    /// first character of a legacy hex-string column.
    SIP003 = 0x00,
}

/// Conversion from a u8
impl TryFrom<u8> for ColumnEncoding {
    type Error = crate::codec::Error;

    /// Map an encoding tag byte back to its `ColumnEncoding` variant.
    /// Any unrecognized byte value yields a `DeserializeError`.
    fn try_from(val: u8) -> Result<Self, Self::Error> {
        if val == 0x00 {
            return Ok(Self::SIP003);
        }
        Err(Self::Error::DeserializeError(format!(
            "Invalid ColumnEncoding {:02x}",
            val
        )))
    }
}

impl ColumnEncoding {
/// Convert to u8 value
pub fn as_u8(&self) -> u8 {
match self {
Self::SIP003 => 0x00,
}
}
}

impl StacksMessageCodec for ColumnEncoding {
fn consensus_serialize<W: Write>(&self, fd: &mut W) -> Result<(), CodecError> {
write_next(fd, &self.as_u8())
}

fn consensus_deserialize<R: Read>(fd: &mut R) -> Result<Self, CodecError> {
let byte: u8 = read_next(fd)?;
Self::try_from(byte)
}
}

/// This is an alternative to rusqlite's ToSql and FromSql traits which takes an optional encoding.
/// If the encoding is None, then the implementation should return a hex string's ASCII bytes.
pub trait SqlEncoded {
    /// Encode `self` for storage in a DB column.
    /// With `encoding == None`, produce the legacy representation: the ASCII
    /// bytes of a hex string. With `Some(..)`, produce that codec's byte
    /// string (tagged with the codec's non-printable prefix byte).
    fn sql_encoded(&self, encoding: Option<ColumnEncoding>) -> Vec<u8>;

    /// Decode a value of `Self` from column `column_name` of `row`.
    /// `encoding` must match how the column was written: `None` for the
    /// legacy hex-string form, `Some(..)` for a tagged binary codec.
    fn sql_decoded(
        row: &Row,
        column_name: &str,
        encoding: Option<ColumnEncoding>,
    ) -> Result<Self, crate::codec::Error>
    where
        Self: Sized;

    /// Convenience wrapper: encode with the legacy hex-string representation.
    #[inline]
    fn sqlhex(&self) -> Vec<u8> {
        self.sql_encoded(None)
    }
}
123 changes: 115 additions & 8 deletions stacks-common/src/util/macros.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ macro_rules! iterable_enum {
($Name:ident { $($Variant:ident,)* }) =>
{
pub enum $Name {
$($Variant),*,
$($Variant),*,
}
impl $Name {
pub const ALL: &'static [$Name] = &[$($Name::$Variant),*];
Expand Down Expand Up @@ -731,25 +731,132 @@ macro_rules! fmax {
}

#[cfg(feature = "rusqlite")]
#[macro_export]
macro_rules! impl_byte_array_rusqlite_only {
($thing:ident) => {
impl rusqlite::types::FromSql for $thing {
fn column_result(
value: rusqlite::types::ValueRef,
) -> rusqlite::types::FromSqlResult<Self> {
let hex_str = value.as_str()?;
use $crate::codec::StacksMessageCodec;
let byte_slice = value.as_bytes()
.map_err(|e| {
error!("Failed to load column result as bytes: {:?}", &e);
e
})?;
let mut cursor = byte_slice;

// NB: This is a match statement so that if we add more encodings, this won't
// compile without a corresponding alteration.
match $crate::util::db::ColumnEncoding::consensus_deserialize(&mut cursor) {
Ok($crate::util::db::ColumnEncoding::SIP003) => {
// there's a designated encoding. Honor it.
let inst = $thing::consensus_deserialize(&mut cursor)
.map_err(|e| {
error!("Failed to deserialize column from bytes: {:?}, {:?}", &cursor, &e);
rusqlite::types::FromSqlError::InvalidType
})?;
return Ok(inst);
}
Err(_e) => {
// byte code is not recognized, so this must be a hex string
}
}

// no designated encoding byte, so this must be a hex string.
// try to decode it as such (but error out if this is not a valid hex string)
let hex_str = str::from_utf8(&byte_slice)
.map_err(|e| {
error!("Failed to interpret byte string as ASCII hex: {:?}", &e);
rusqlite::types::FromSqlError::InvalidType
})?;

let byte_str = $crate::util::hash::hex_bytes(hex_str)
.map_err(|_e| rusqlite::types::FromSqlError::InvalidType)?;
.map_err(|e| {
error!("Failed to decode hex string {:?}: {:?}", &hex_str, &e);
rusqlite::types::FromSqlError::InvalidType
})?;

let inst = $thing::from_bytes(&byte_str)
.ok_or(rusqlite::types::FromSqlError::InvalidType)?;
.ok_or_else(|| {
error!("Failed to decode bytes to value: {:?}", &byte_str);
rusqlite::types::FromSqlError::InvalidType
})?;

Ok(inst)
}
}

impl rusqlite::types::ToSql for $thing {
fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
let hex_str = self.to_hex();
Ok(hex_str.into())
impl $crate::util::db::SqlEncoded for $thing {
fn sql_encoded(&self, encoding: Option<$crate::util::db::ColumnEncoding>) -> Vec<u8> {
use $crate::codec::StacksMessageCodec;
match encoding {
None => {
// hex string
let hex_str = self.to_hex();
hex_str.as_bytes().to_vec()
},
Some($crate::util::db::ColumnEncoding::SIP003) => {
// SIP003 byte string
let bytes = self.serialize_to_vec();
let mut ret = vec![0; bytes.len() + 1];

// SAFETY: ret has enough bytes allocated
ret[0] = $crate::util::db::ColumnEncoding::SIP003.as_u8();
ret[1..].copy_from_slice(&bytes);
ret
}
}
}

fn sql_decoded(row: &rusqlite::Row, column_name: &str, encoding: Option<$crate::util::db::ColumnEncoding>) -> Result<Self, $crate::codec::Error> {
use $crate::codec::StacksMessageCodec;
match encoding {
None => {
// expect hex string
let hex_bin_str = match row.get_ref(column_name)
.map_err(|e| $crate::codec::Error::DeserializeError(format!("DB error loading hex-encoded column '{column_name}': {e:?}")))?
{
rusqlite::types::ValueRef::Text(bytes) => bytes,
rusqlite::types::ValueRef::Blob(bytes) => bytes,
_ => {
return Err($crate::codec::Error::DeserializeError(format!("DB error reading hex-encoded column '{column_name}: neither Text nor Blob affinity")));
}
};

let hex_str = str::from_utf8(hex_bin_str)
.map_err(|e| $crate::codec::Error::DeserializeError(format!("UTF-8 error decoding hex-encoded bytes from '{column_name}: {e:?}")))?;

let byte_str = $crate::util::hash::hex_bytes(&hex_str)
.map_err(|e| $crate::codec::Error::DeserializeError(format!("Hex error reading hex-encoded column '{column_name}': {e:?}")))?;
let inst = $thing::from_bytes(&byte_str)
.ok_or_else(|| $crate::codec::Error::DeserializeError(format!("Instantiation error from {} bytes of hex-encoded column '{column_name}'", &byte_str.len())))?;
Ok(inst)
}
Some($crate::util::db::ColumnEncoding::SIP003) => {
// expect a SIP003 byte string, with a 1-byte prefix
let byte_str = match row.get_ref(column_name)
.map_err(|e| $crate::codec::Error::DeserializeError(format!("DB error loading SIP003-encoded column '{column_name}': {e:?}")))?
{
rusqlite::types::ValueRef::Text(bytes) => bytes,
rusqlite::types::ValueRef::Blob(bytes) => bytes,
_ => {
return Err($crate::codec::Error::DeserializeError(format!("DB error reading SIP003-encoded column '{column_name}: neither Text nor Blob affinity")));
}
};

let Some(encoding_byte) = byte_str.get(0) else {
return Err($crate::codec::Error::DeserializeError("Zero-length bytestring for SIP003-encoded column '{column_name}'".into()));
};
if *encoding_byte != $crate::util::db::ColumnEncoding::SIP003.as_u8() {
return Err($crate::codec::Error::DeserializeError(format!("Column '{column_name}' is not SIP003-encoded; got encoding-byte value {:02x}", encoding_byte)));
}

// SAFETY: byte_str.len() >= 1 due to above checks
let inst = $thing::consensus_deserialize(&mut &byte_str[1..])?;
Ok(inst)
}
}
}
}
};
Expand Down
10 changes: 5 additions & 5 deletions stacks-node/src/burnchains/bitcoin_regtest_controller.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,8 @@
};
use stacks::chainstate::burn::db::sortdb::SortitionDB;
use stacks::chainstate::burn::operations::{
BlockstackOperationType, DelegateStxOp, LeaderBlockCommitOp, LeaderKeyRegisterOp, PreStxOp,
StackStxOp, TransferStxOp, VoteForAggregateKeyOp,
BlockstackOperationType, BurnOpMemo, DelegateStxOp, LeaderBlockCommitOp, LeaderKeyRegisterOp,

Check failure on line 39 in stacks-node/src/burnchains/bitcoin_regtest_controller.rs

View workflow job for this annotation

GitHub Actions / Cargo Hack Check / All Crates (Windows/Linux)

unused import: `BurnOpMemo`
PreStxOp, StackStxOp, TransferStxOp, VoteForAggregateKeyOp,
};
#[cfg(test)]
use stacks::chainstate::burn::Opcodes;
Expand Down Expand Up @@ -2559,7 +2559,7 @@
parent_vtxindex: 1, // 0x0001
key_block_ptr: 1432, // 0x00000598
key_vtxindex: 1, // 0x0001
memo: vec![11], // 0x5a >> 3
memo: vec![11].into(), // 0x5a >> 3

burn_fee: 110_000, //relevant for fee calculation when sending the tx
input: (Txid([0x00; 32]), 0),
Expand Down Expand Up @@ -2699,7 +2699,7 @@
public_key: VRFPublicKey::from_private(
&VRFPrivateKey::from_bytes(&[0u8; 32]).unwrap(),
),
memo: vec![],
memo: vec![].into(),
txid: Txid([3u8; 32]),
vtxindex: 0,
block_height: 1,
Expand Down Expand Up @@ -2845,7 +2845,7 @@
parent_vtxindex: 1, // 0x0001
key_block_ptr: 1432, // 0x00000598
key_vtxindex: 1, // 0x0001
memo: vec![11], // 0x5a >> 3
memo: vec![11].into(), // 0x5a >> 3

burn_fee: 0,
input: (Txid([0x00; 32]), 0),
Expand Down
10 changes: 5 additions & 5 deletions stacks-node/src/nakamoto_node/relayer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
RewardSetInfo, BURN_BLOCK_MINED_AT_MODULUS,
};
use stacks::chainstate::burn::operations::{
BlockstackOperationType, LeaderBlockCommitOp, LeaderKeyRegisterOp,
BlockstackOperationType, BurnOpMemo, LeaderBlockCommitOp, LeaderKeyRegisterOp,

Check failure on line 34 in stacks-node/src/nakamoto_node/relayer.rs

View workflow job for this annotation

GitHub Actions / Cargo Hack Check / All Crates (Windows/Linux)

unused import: `BurnOpMemo`
};
use stacks::chainstate::burn::{BlockSnapshot, ConsensusHash};
use stacks::chainstate::nakamoto::coordinator::get_nakamoto_next_recipients;
Expand Down Expand Up @@ -988,7 +988,7 @@
) -> BlockstackOperationType {
BlockstackOperationType::LeaderKeyRegister(LeaderKeyRegisterOp {
public_key: vrf_public_key,
memo: miner_pkh.as_bytes().to_vec(),
memo: miner_pkh.as_bytes().to_vec().into(),
consensus_hash: consensus_hash.clone(),
vtxindex: 0,
txid: Txid([0u8; 32]),
Expand Down Expand Up @@ -1202,7 +1202,7 @@
key_block_ptr: u32::try_from(key.block_height)
.expect("FATAL: burn block height exceeded u32"),
key_vtxindex: u16::try_from(key.op_vtxindex).expect("FATAL: vtxindex exceeded u16"),
memo: vec![STACKS_EPOCH_LATEST_MARKER],
memo: vec![STACKS_EPOCH_LATEST_MARKER].into(),
new_seed: VRFSeed::from_proof(&tip_vrf_proof),
parent_block_ptr: u32::try_from(commit_parent_block_burn_height)
.expect("FATAL: burn block height exceeded u32"),
Expand Down Expand Up @@ -2345,7 +2345,7 @@
"1da75863a7e1ef86f0f550d92b1f77dc60af23694b884b2816b703137ff94e71",
)
.unwrap(),
memo: pubkey_hash.as_ref().to_vec(),
memo: pubkey_hash.as_ref().to_vec().into(),
};
let path = "/tmp/vrf_key.json";
save_activated_vrf_key(path, &key);
Expand All @@ -2369,7 +2369,7 @@
"1da75863a7e1ef86f0f550d92b1f77dc60af23694b884b2816b703137ff94e71",
)
.unwrap(),
memo: pubkey_hash.as_ref().to_vec(),
memo: pubkey_hash.as_ref().to_vec().into(),
};
let path = "/tmp/vrf_key.json";
save_activated_vrf_key(path, &key);
Expand Down
Loading
Loading