@@ -15,7 +15,9 @@ use provenance_map::*;
 use rustc_abi::{Align, HasDataLayout, Size};
 use rustc_ast::Mutability;
 use rustc_data_structures::intern::Interned;
-use rustc_macros::{HashStable, TyDecodable, TyEncodable};
+use rustc_macros::HashStable;
+use rustc_serialize::{Decodable, Encodable};
+use rustc_type_ir::{TyDecoder, TyEncoder};

 use super::{
     AllocId, BadBytesAccess, CtfeProvenance, InterpErrorKind, InterpResult, Pointer,
@@ -77,7 +79,7 @@ impl AllocBytes for Box<[u8]> {
 /// module provides higher-level access.
 // Note: for performance reasons when interning, some of the `Allocation` fields can be partially
 // hashed. (see the `Hash` impl below for more details), so the impl is not derived.
-#[derive(Clone, Eq, PartialEq, TyEncodable, TyDecodable)]
+#[derive(Clone, Eq, PartialEq)]
 #[derive(HashStable)]
 pub struct Allocation<Prov: Provenance = CtfeProvenance, Extra = (), Bytes = Box<[u8]>> {
     /// The actual bytes of the allocation.
@@ -101,6 +103,108 @@ pub struct Allocation<Prov: Provenance = CtfeProvenance, Extra = (), Bytes = Box
     pub extra: Extra,
 }

+/// Helper struct that packs an alignment, mutability, and "all bytes are zero" flag together.
+///
+/// Alignment values always have 2 free high bits.
+struct AllocFlags {
+    align: Align,
+    mutability: Mutability,
+    all_zero: bool,
+}
+
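+// Illustrative summary of the packed byte, as implied by the encode/decode impls below:
+//
+//   bit 7      bit 6        bits 0..=5
+//   all_zero   mutability   log2(align.bytes())
+//
+// For example, an 8-byte-aligned, mutable, all-zero allocation packs to
+// 0b1100_0011 (all_zero = 1, Mut = 1, trailing_zeros(8) = 3).
+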
+impl<E: TyEncoder> Encodable<E> for AllocFlags {
+    fn encode(&self, encoder: &mut E) {
+        let mut flags = self.align.bytes().trailing_zeros() as u8;
+        flags |= match self.mutability {
+            Mutability::Not => 0,
+            Mutability::Mut => 1 << 6,
+        };
+        flags |= (self.all_zero as u8) << 7;
+        flags.encode(encoder);
+    }
+}
+
+impl<D: TyDecoder> Decodable<D> for AllocFlags {
+    fn decode(decoder: &mut D) -> Self {
+        let flags: u8 = Decodable::decode(decoder);
+        let align = flags & 0b0011_1111;
+        let mutability = flags & 0b0100_0000;
+        let all_zero = flags & 0b1000_0000;
+
+        let align = Align::from_bytes(1 << align).unwrap();
+        let mutability = match mutability {
+            0 => Mutability::Not,
+            _ => Mutability::Mut,
+        };
+        let all_zero = all_zero > 0;
+
+        AllocFlags { align, mutability, all_zero }
+    }
+}
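+// Round-trip sketch (illustrative, matching the packing example above): decoding 0b1100_0011
+// masks out the alignment exponent 3 (so `Align::from_bytes(1 << 3)`, i.e. 8 bytes), a non-zero
+// bit 6 for Mutability::Mut, and a set bit 7 for all_zero = true.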
+
+/// Efficiently detect whether a slice of `u8` is all zero.
+///
+/// This is used in encoding of [`Allocation`] to special-case all-zero allocations. It is only
+/// optimized a little.
+#[inline]
+fn all_zero(buf: &[u8]) -> bool {
+    // In the empty case we wouldn't encode any contents even without this system where we
+    // special-case allocations whose contents are all 0. We can return anything in the empty case.
+    if buf.is_empty() {
+        return true;
+    }
+    // Just fast-rejecting based on the first element significantly reduces the amount that we end
+    // up walking the whole array.
+    if buf[0] != 0 {
+        return false;
+    }
+
+    // This strategy of combining all slice elements with & or | is unbeatable for the large
+    // all-zero case because it is so well-understood by autovectorization.
+    buf.iter().fold(true, |acc, b| acc & (*b == 0))
+}
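+// Aside: an equivalent `buf.iter().all(|&b| b == 0)` would short-circuit on the first non-zero
+// byte, which generally prevents autovectorization; the branch-free `&` fold keeps the loop
+// vectorizable, at the cost of always scanning the whole slice (mitigated by the fast reject on
+// the first byte above).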
+
+impl<Prov: Provenance, Extra, Bytes, E: TyEncoder> Encodable<E> for Allocation<Prov, Extra, Bytes>
+where
+    Bytes: AllocBytes,
+    ProvenanceMap<Prov>: Encodable<E>,
+    Extra: Encodable<E>,
+{
+    fn encode(&self, encoder: &mut E) {
+        let all_zero = all_zero(&self.bytes);
+        AllocFlags { align: self.align, mutability: self.mutability, all_zero }.encode(encoder);
+
+        encoder.emit_usize(self.bytes.len());
+        if !all_zero {
+            encoder.emit_raw_bytes(&self.bytes);
+        }
+        self.provenance.encode(encoder);
+        self.init_mask.encode(encoder);
+        self.extra.encode(encoder);
+    }
+}
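+// Resulting encoding order (a summary of the impl above): one packed AllocFlags byte, the byte
+// length as a usize, the raw bytes only when `all_zero` is false, then the provenance map, the
+// init mask, and the extra data.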
+
+impl<Prov: Provenance, Extra, Bytes, D: TyDecoder> Decodable<D> for Allocation<Prov, Extra, Bytes>
+where
+    Bytes: AllocBytes,
+    ProvenanceMap<Prov>: Decodable<D>,
+    Extra: Decodable<D>,
+{
+    fn decode(decoder: &mut D) -> Self {
+        let AllocFlags { align, mutability, all_zero } = Decodable::decode(decoder);
+
+        let len = decoder.read_usize();
+        let bytes = if all_zero { vec![0u8; len] } else { decoder.read_raw_bytes(len).to_vec() };
+        let bytes = Bytes::from_bytes(bytes, align);
+
+        let provenance = Decodable::decode(decoder);
+        let init_mask = Decodable::decode(decoder);
+        let extra = Decodable::decode(decoder);
+
+        Self { bytes, provenance, init_mask, align, mutability, extra }
+    }
+}
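+// Illustrative payoff: for an all-zero allocation only the flags byte and the length are written
+// for the contents, so e.g. a zeroed 4096-byte allocation stores roughly one byte plus a usize
+// (plus provenance/init-mask/extra metadata) instead of 4096 raw bytes; decode re-materializes
+// the buffer with `vec![0u8; len]`.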
+
 /// This is the maximum size we will hash at a time, when interning an `Allocation` and its
 /// `InitMask`. Note, we hash that amount of bytes twice: at the start, and at the end of a buffer.
 /// Used when these two structures are large: we only partially hash the larger fields in that