Skip to content

Commit ceadd0d

Browse files
committed
miniscript: use byte arrays in lexer
This is a little less space-efficient, but it gets rid of a lifetime parameter on the lexer token, which is annoying because it prevents us from putting the token into error types. (Currently we convert it to a string before putting it into an error.) It also eliminates a whole bunch of panics: previously we converted slices to hashes and called .expect() on the length being right, even though we already knew it was right.
1 parent 5de4df8 commit ceadd0d

File tree

2 files changed

+51
-50
lines changed

2 files changed

+51
-50
lines changed

src/miniscript/decode.rs

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -373,12 +373,12 @@ pub fn parse<Ctx: ScriptContext>(
373373
tokens,
374374
// pubkey
375375
Tk::Bytes33(pk) => {
376-
let ret = Ctx::Key::from_slice(pk)
376+
let ret = Ctx::Key::from_slice(&pk)
377377
.map_err(|e| Error::PubKeyCtxError(e, Ctx::name_str()))?;
378378
term.reduce0(Terminal::PkK(ret))?
379379
},
380380
Tk::Bytes65(pk) => {
381-
let ret = Ctx::Key::from_slice(pk)
381+
let ret = Ctx::Key::from_slice(&pk)
382382
.map_err(|e| Error::PubKeyCtxError(e, Ctx::name_str()))?;
383383
term.reduce0(Terminal::PkK(ret))?
384384
},
@@ -395,7 +395,7 @@ pub fn parse<Ctx: ScriptContext>(
395395
// after bytes32 means bytes32 is in a hashlock
396396
// Finally for the first case, K being parsed as a solo expression is a Pk type
397397
Tk::Bytes32(pk) => {
398-
let ret = Ctx::Key::from_slice(pk).map_err(|e| Error::PubKeyCtxError(e, Ctx::name_str()))?;
398+
let ret = Ctx::Key::from_slice(&pk).map_err(|e| Error::PubKeyCtxError(e, Ctx::name_str()))?;
399399
term.reduce0(Terminal::PkK(ret))?
400400
},
401401
// checksig
@@ -414,20 +414,20 @@ pub fn parse<Ctx: ScriptContext>(
414414
tokens,
415415
Tk::Dup => {
416416
term.reduce0(Terminal::RawPkH(
417-
hash160::Hash::from_slice(hash).expect("valid size")
417+
hash160::Hash::from_byte_array(hash)
418418
))?
419419
},
420420
Tk::Verify, Tk::Equal, Tk::Num(32), Tk::Size => {
421421
non_term.push(NonTerm::Verify);
422422
term.reduce0(Terminal::Hash160(
423-
hash160::Hash::from_slice(hash).expect("valid size")
423+
hash160::Hash::from_byte_array(hash)
424424
))?
425425
},
426426
),
427427
Tk::Ripemd160, Tk::Verify, Tk::Equal, Tk::Num(32), Tk::Size => {
428428
non_term.push(NonTerm::Verify);
429429
term.reduce0(Terminal::Ripemd160(
430-
ripemd160::Hash::from_slice(hash).expect("valid size")
430+
ripemd160::Hash::from_byte_array(hash)
431431
))?
432432
},
433433
),
@@ -437,13 +437,13 @@ pub fn parse<Ctx: ScriptContext>(
437437
Tk::Sha256, Tk::Verify, Tk::Equal, Tk::Num(32), Tk::Size => {
438438
non_term.push(NonTerm::Verify);
439439
term.reduce0(Terminal::Sha256(
440-
sha256::Hash::from_slice(hash).expect("valid size")
440+
sha256::Hash::from_byte_array(hash)
441441
))?
442442
},
443443
Tk::Hash256, Tk::Verify, Tk::Equal, Tk::Num(32), Tk::Size => {
444444
non_term.push(NonTerm::Verify);
445445
term.reduce0(Terminal::Hash256(
446-
hash256::Hash::from_slice(hash).expect("valid size")
446+
hash256::Hash::from_byte_array(hash)
447447
))?
448448
},
449449
),
@@ -480,14 +480,14 @@ pub fn parse<Ctx: ScriptContext>(
480480
Tk::Equal,
481481
Tk::Num(32),
482482
Tk::Size => term.reduce0(Terminal::Sha256(
483-
sha256::Hash::from_slice(hash).expect("valid size")
483+
sha256::Hash::from_byte_array(hash)
484484
))?,
485485
Tk::Hash256,
486486
Tk::Verify,
487487
Tk::Equal,
488488
Tk::Num(32),
489489
Tk::Size => term.reduce0(Terminal::Hash256(
490-
hash256::Hash::from_slice(hash).expect("valid size")
490+
hash256::Hash::from_byte_array(hash)
491491
))?,
492492
),
493493
Tk::Hash20(hash) => match_token!(
@@ -497,14 +497,14 @@ pub fn parse<Ctx: ScriptContext>(
497497
Tk::Equal,
498498
Tk::Num(32),
499499
Tk::Size => term.reduce0(Terminal::Ripemd160(
500-
ripemd160::Hash::from_slice(hash).expect("valid size")
500+
ripemd160::Hash::from_byte_array(hash)
501501
))?,
502502
Tk::Hash160,
503503
Tk::Verify,
504504
Tk::Equal,
505505
Tk::Num(32),
506506
Tk::Size => term.reduce0(Terminal::Hash160(
507-
hash160::Hash::from_slice(hash).expect("valid size")
507+
hash160::Hash::from_byte_array(hash)
508508
))?,
509509
),
510510
// thresholds
@@ -545,9 +545,9 @@ pub fn parse<Ctx: ScriptContext>(
545545
for _ in 0..n {
546546
match_token!(
547547
tokens,
548-
Tk::Bytes33(pk) => keys.push(<Ctx::Key>::from_slice(pk)
548+
Tk::Bytes33(pk) => keys.push(<Ctx::Key>::from_slice(&pk)
549549
.map_err(|e| Error::PubKeyCtxError(e, Ctx::name_str()))?),
550-
Tk::Bytes65(pk) => keys.push(<Ctx::Key>::from_slice(pk)
550+
Tk::Bytes65(pk) => keys.push(<Ctx::Key>::from_slice(&pk)
551551
.map_err(|e| Error::PubKeyCtxError(e, Ctx::name_str()))?),
552552
);
553553
}
@@ -567,14 +567,14 @@ pub fn parse<Ctx: ScriptContext>(
567567
while tokens.peek() == Some(&Tk::CheckSigAdd) {
568568
match_token!(
569569
tokens,
570-
Tk::CheckSigAdd, Tk::Bytes32(pk) => keys.push(<Ctx::Key>::from_slice(pk)
570+
Tk::CheckSigAdd, Tk::Bytes32(pk) => keys.push(<Ctx::Key>::from_slice(&pk)
571571
.map_err(|e| Error::PubKeyCtxError(e, Ctx::name_str()))?),
572572
);
573573
}
574574
// Last key must be with a CheckSig
575575
match_token!(
576576
tokens,
577-
Tk::CheckSig, Tk::Bytes32(pk) => keys.push(<Ctx::Key>::from_slice(pk)
577+
Tk::CheckSig, Tk::Bytes32(pk) => keys.push(<Ctx::Key>::from_slice(&pk)
578578
.map_err(|e| Error::PubKeyCtxError(e, Ctx::name_str()))?),
579579
);
580580
keys.reverse();

src/miniscript/lex.rs

Lines changed: 35 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -8,14 +8,15 @@
88
use core::fmt;
99

1010
use bitcoin::blockdata::{opcodes, script};
11+
use bitcoin::hex::DisplayHex as _;
1112

1213
use super::Error;
1314
use crate::prelude::*;
1415

1516
/// Atom of a tokenized version of a script
1617
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
1718
#[allow(missing_docs)]
18-
pub enum Token<'s> {
19+
pub enum Token {
1920
BoolAnd,
2021
BoolOr,
2122
Add,
@@ -44,22 +45,20 @@ pub enum Token<'s> {
4445
Sha256,
4546
Hash256,
4647
Num(u32),
47-
Hash20(&'s [u8]),
48-
Bytes32(&'s [u8]),
49-
Bytes33(&'s [u8]),
50-
Bytes65(&'s [u8]),
48+
Hash20([u8; 20]),
49+
Bytes32([u8; 32]),
50+
Bytes33([u8; 33]),
51+
Bytes65([u8; 65]),
5152
}
5253

53-
impl fmt::Display for Token<'_> {
54+
impl fmt::Display for Token {
5455
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
5556
match *self {
5657
Token::Num(n) => write!(f, "#{}", n),
57-
Token::Hash20(b) | Token::Bytes33(b) | Token::Bytes32(b) | Token::Bytes65(b) => {
58-
for ch in b {
59-
write!(f, "{:02x}", *ch)?;
60-
}
61-
Ok(())
62-
}
58+
Token::Hash20(b) => write!(f, "{}", b.as_hex()),
59+
Token::Bytes32(b) => write!(f, "{}", b.as_hex()),
60+
Token::Bytes33(b) => write!(f, "{}", b.as_hex()),
61+
Token::Bytes65(b) => write!(f, "{}", b.as_hex()),
6362
x => write!(f, "{:?}", x),
6463
}
6564
}
@@ -68,18 +67,18 @@ impl fmt::Display for Token<'_> {
6867
#[derive(Debug, Clone)]
6968
/// Iterator that goes through a vector of tokens backward (our parser wants to read
7069
/// backward and this is more efficient anyway since we can use `Vec::pop()`).
71-
pub struct TokenIter<'s>(Vec<Token<'s>>);
70+
pub struct TokenIter(Vec<Token>);
7271

73-
impl<'s> TokenIter<'s> {
72+
impl TokenIter {
7473
/// Create a new TokenIter
75-
pub fn new(v: Vec<Token<'s>>) -> TokenIter<'s> { TokenIter(v) }
74+
pub fn new(v: Vec<Token>) -> TokenIter { TokenIter(v) }
7675

7776
/// Look at the top at Iterator
78-
pub fn peek(&self) -> Option<&'s Token> { self.0.last() }
77+
pub fn peek(&self) -> Option<&Token> { self.0.last() }
7978

8079
/// Push a value to the iterator
8180
/// This will be first value consumed by popun_
82-
pub fn un_next(&mut self, tok: Token<'s>) { self.0.push(tok) }
81+
pub fn un_next(&mut self, tok: Token) { self.0.push(tok) }
8382

8483
/// The len of the iterator
8584
pub fn len(&self) -> usize { self.0.len() }
@@ -88,14 +87,14 @@ impl<'s> TokenIter<'s> {
8887
pub fn is_empty(&self) -> bool { self.0.is_empty() }
8988
}
9089

91-
impl<'s> Iterator for TokenIter<'s> {
92-
type Item = Token<'s>;
90+
impl Iterator for TokenIter {
91+
type Item = Token;
9392

94-
fn next(&mut self) -> Option<Token<'s>> { self.0.pop() }
93+
fn next(&mut self) -> Option<Token> { self.0.pop() }
9594
}
9695

9796
/// Tokenize a script
98-
pub fn lex(script: &'_ script::Script) -> Result<Vec<Token<'_>>, Error> {
97+
pub fn lex(script: &'_ script::Script) -> Result<Vec<Token>, Error> {
9998
let mut ret = Vec::with_capacity(script.len());
10099

101100
for ins in script.instructions_minimal() {
@@ -207,20 +206,22 @@ pub fn lex(script: &'_ script::Script) -> Result<Vec<Token<'_>>, Error> {
207206
ret.push(Token::Hash256);
208207
}
209208
script::Instruction::PushBytes(bytes) => {
210-
match bytes.len() {
211-
20 => ret.push(Token::Hash20(bytes.as_bytes())),
212-
32 => ret.push(Token::Bytes32(bytes.as_bytes())),
213-
33 => ret.push(Token::Bytes33(bytes.as_bytes())),
214-
65 => ret.push(Token::Bytes65(bytes.as_bytes())),
215-
_ => {
216-
// check minimality of the number
217-
match script::read_scriptint(bytes.as_bytes()) {
218-
Ok(v) if v >= 0 => {
219-
ret.push(Token::Num(v as u32));
220-
}
221-
Ok(_) => return Err(Error::InvalidPush(bytes.to_owned().into())),
222-
Err(e) => return Err(Error::Script(e)),
209+
if let Ok(bytes) = bytes.as_bytes().try_into() {
210+
ret.push(Token::Hash20(bytes));
211+
} else if let Ok(bytes) = bytes.as_bytes().try_into() {
212+
ret.push(Token::Bytes32(bytes));
213+
} else if let Ok(bytes) = bytes.as_bytes().try_into() {
214+
ret.push(Token::Bytes33(bytes));
215+
} else if let Ok(bytes) = bytes.as_bytes().try_into() {
216+
ret.push(Token::Bytes65(bytes));
217+
} else {
218+
// check minimality of the number
219+
match script::read_scriptint(bytes.as_bytes()) {
220+
Ok(v) if v >= 0 => {
221+
ret.push(Token::Num(v as u32));
223222
}
223+
Ok(_) => return Err(Error::InvalidPush(bytes.to_owned().into())),
224+
Err(e) => return Err(Error::Script(e)),
224225
}
225226
}
226227
}

0 commit comments

Comments (0)