Skip to content

Commit fdad462

Browse files
authored
zeroize: add #[inline(always)] annotations (#772)
1 parent 4d0c62e commit fdad462

File tree

1 file changed

+25
-17
lines changed

1 file changed

+25
-17
lines changed

zeroize/src/lib.rs

Lines changed: 25 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -284,6 +284,19 @@ pub trait ZeroizeOnDrop {}
284284
/// Marker trait for types whose [`Default`] is the desired zeroization result
285285
pub trait DefaultIsZeroes: Copy + Default + Sized {}
286286

287+
/// Fallible trait for representing cases where zeroization may or may not be
288+
/// possible.
289+
///
290+
/// This is primarily useful for scenarios like reference counted data, where
291+
/// zeroization is only possible when the last reference is dropped.
292+
pub trait TryZeroize {
293+
/// Try to zero out this object from memory using Rust intrinsics which
294+
/// ensure the zeroization operation is not "optimized away" by the
295+
/// compiler.
296+
#[must_use]
297+
fn try_zeroize(&mut self) -> bool;
298+
}
299+
287300
impl<Z> Zeroize for Z
288301
where
289302
Z: DefaultIsZeroes,
@@ -598,19 +611,6 @@ impl Zeroize for CString {
598611
}
599612
}
600613

601-
/// Fallible trait for representing cases where zeroization may or may not be
602-
/// possible.
603-
///
604-
/// This is primarily useful for scenarios like reference counted data, where
605-
/// zeroization is only possible when the last reference is dropped.
606-
pub trait TryZeroize {
607-
/// Try to zero out this object from memory using Rust intrinsics which
608-
/// ensure the zeroization operation is not "optimized away" by the
609-
/// compiler.
610-
#[must_use]
611-
fn try_zeroize(&mut self) -> bool;
612-
}
613-
614614
/// `Zeroizing` is a wrapper for any `Z: Zeroize` type which implements a
615615
/// `Drop` handler which zeroizes dropped values.
616616
#[derive(Debug, Default, Eq, PartialEq)]
@@ -622,16 +622,19 @@ where
622622
{
623623
/// Move value inside a `Zeroizing` wrapper which ensures it will be
624624
/// zeroized when it's dropped.
625+
#[inline(always)]
625626
pub fn new(value: Z) -> Self {
626-
value.into()
627+
Self(value)
627628
}
628629
}
629630

630631
impl<Z: Zeroize + Clone> Clone for Zeroizing<Z> {
632+
#[inline(always)]
631633
fn clone(&self) -> Self {
632634
Self(self.0.clone())
633635
}
634636

637+
#[inline(always)]
635638
fn clone_from(&mut self, source: &Self) {
636639
self.0.zeroize();
637640
self.0.clone_from(&source.0);
@@ -642,6 +645,7 @@ impl<Z> From<Z> for Zeroizing<Z>
642645
where
643646
Z: Zeroize,
644647
{
648+
#[inline(always)]
645649
fn from(value: Z) -> Zeroizing<Z> {
646650
Zeroizing(value)
647651
}
@@ -653,6 +657,7 @@ where
653657
{
654658
type Target = Z;
655659

660+
#[inline(always)]
656661
fn deref(&self) -> &Z {
657662
&self.0
658663
}
@@ -662,6 +667,7 @@ impl<Z> ops::DerefMut for Zeroizing<Z>
662667
where
663668
Z: Zeroize,
664669
{
670+
#[inline(always)]
665671
fn deref_mut(&mut self) -> &mut Z {
666672
&mut self.0
667673
}
@@ -672,6 +678,7 @@ where
672678
T: ?Sized,
673679
Z: AsRef<T> + Zeroize,
674680
{
681+
#[inline(always)]
675682
fn as_ref(&self) -> &T {
676683
self.0.as_ref()
677684
}
@@ -682,6 +689,7 @@ where
682689
T: ?Sized,
683690
Z: AsMut<T> + Zeroize,
684691
{
692+
#[inline(always)]
685693
fn as_mut(&mut self) -> &mut T {
686694
self.0.as_mut()
687695
}
@@ -710,13 +718,13 @@ where
710718
/// Use fences to prevent accesses from being reordered before this
711719
/// point, which should hopefully help ensure that all accessors
712720
/// see zeroes after this point.
713-
#[inline]
721+
#[inline(always)]
714722
fn atomic_fence() {
715723
atomic::compiler_fence(atomic::Ordering::SeqCst);
716724
}
717725

718726
/// Perform a volatile write to the destination
719-
#[inline]
727+
#[inline(always)]
720728
fn volatile_write<T: Copy + Sized>(dst: &mut T, src: T) {
721729
unsafe { ptr::write_volatile(dst, src) }
722730
}
@@ -729,7 +737,7 @@ fn volatile_write<T: Copy + Sized>(dst: &mut T, src: T) {
729737
/// `count` must not be larger than an `isize`.
730738
/// `dst` being offset by `mem::size_of::<T> * count` bytes must not wrap around the address space.
731739
/// Also `dst` must be properly aligned.
732-
#[inline]
740+
#[inline(always)]
733741
unsafe fn volatile_set<T: Copy + Sized>(dst: *mut T, src: T, count: usize) {
734742
// TODO(tarcieri): use `volatile_set_memory` when stabilized
735743
for i in 0..count {

0 commit comments

Comments
 (0)