@@ -15,6 +15,7 @@
 #include "flags_parser.h"
 #include "local_cache.h"
 #include "memtag.h"
+#include "options.h"
 #include "quarantine.h"
 #include "report.h"
 #include "secondary.h"
@@ -144,16 +145,19 @@ class Allocator {
     reportUnrecognizedFlags();
 
     // Store some flags locally.
-    Options.MayReturnNull = getFlags()->may_return_null;
-    Options.FillContents =
-        getFlags()->zero_contents
-            ? ZeroFill
-            : (getFlags()->pattern_fill_contents ? PatternOrZeroFill : NoFill);
-    Options.DeallocTypeMismatch = getFlags()->dealloc_type_mismatch;
-    Options.DeleteSizeMismatch = getFlags()->delete_size_mismatch;
-    Options.TrackAllocationStacks = false;
-    Options.UseOddEvenTags = true;
-    Options.QuarantineMaxChunkSize =
+    if (getFlags()->may_return_null)
+      Primary.Options.set(OptionBit::MayReturnNull);
+    if (getFlags()->zero_contents)
+      Primary.Options.setFillContentsMode(ZeroFill);
+    else if (getFlags()->pattern_fill_contents)
+      Primary.Options.setFillContentsMode(PatternOrZeroFill);
+    if (getFlags()->dealloc_type_mismatch)
+      Primary.Options.set(OptionBit::DeallocTypeMismatch);
+    if (getFlags()->delete_size_mismatch)
+      Primary.Options.set(OptionBit::DeleteSizeMismatch);
+    Primary.Options.set(OptionBit::UseOddEvenTags);
+
+    QuarantineMaxChunkSize =
         static_cast<u32>(getFlags()->quarantine_max_chunk_size);
 
     Stats.initLinkerInitialized();
@@ -250,8 +254,8 @@ class Allocator {
 #endif
   }
 
-  uptr computeOddEvenMaskForPointerMaybe(uptr Ptr, uptr Size) {
-    if (!Options.UseOddEvenTags)
+  uptr computeOddEvenMaskForPointerMaybe(Options Options, uptr Ptr, uptr Size) {
+    if (!Options.get(OptionBit::UseOddEvenTags))
       return 0;
 
     // If a chunk's tag is odd, we want the tags of the surrounding blocks to be
@@ -267,6 +271,7 @@ class Allocator {
                           uptr Alignment = MinAlignment,
                           bool ZeroContents = false) {
     initThreadMaybe();
+    Options Options = Primary.Options.load();
 
 #ifdef GWP_ASAN_HOOKS
     if (UNLIKELY(GuardedAlloc.shouldSample())) {
@@ -278,10 +283,10 @@ class Allocator {
     const FillContentsMode FillContents = ZeroContents ? ZeroFill
                                           : TSDRegistry.getDisableMemInit()
                                               ? NoFill
-                                              : Options.FillContents;
+                                              : Options.getFillContentsMode();
 
     if (UNLIKELY(Alignment > MaxAlignment)) {
-      if (Options.MayReturnNull)
+      if (Options.get(OptionBit::MayReturnNull))
         return nullptr;
       reportAlignmentTooBig(Alignment, MaxAlignment);
     }
@@ -300,7 +305,7 @@ class Allocator {
     // Takes care of extravagantly large sizes as well as integer overflows.
     static_assert(MaxAllowedMallocSize < UINTPTR_MAX - MaxAlignment, "");
     if (UNLIKELY(Size >= MaxAllowedMallocSize)) {
-      if (Options.MayReturnNull)
+      if (Options.get(OptionBit::MayReturnNull))
         return nullptr;
       reportAllocationSizeTooBig(Size, NeededSize, MaxAllowedMallocSize);
     }
@@ -336,7 +341,7 @@ class Allocator {
                                  FillContents);
 
     if (UNLIKELY(!Block)) {
-      if (Options.MayReturnNull)
+      if (Options.get(OptionBit::MayReturnNull))
         return nullptr;
       reportOutOfMemory(NeededSize);
     }
@@ -359,7 +364,7 @@ class Allocator {
     //
     // When memory tagging is enabled, zeroing the contents is done as part of
     // setting the tag.
-    if (UNLIKELY(useMemoryTagging())) {
+    if (UNLIKELY(useMemoryTagging(Options))) {
       uptr PrevUserPtr;
       Chunk::UnpackedHeader Header;
       const uptr BlockSize = PrimaryT::getSizeByClassId(ClassId);
@@ -424,10 +429,10 @@ class Allocator {
         }
       } else {
         const uptr OddEvenMask =
-            computeOddEvenMaskForPointerMaybe(BlockUptr, BlockSize);
+            computeOddEvenMaskForPointerMaybe(Options, BlockUptr, BlockSize);
         TaggedPtr = prepareTaggedChunk(Ptr, Size, OddEvenMask, BlockEnd);
       }
-      storeAllocationStackMaybe(Ptr);
+      storeAllocationStackMaybe(Options, Ptr);
     } else if (UNLIKELY(FillContents != NoFill)) {
       // This condition is not necessarily unlikely, but since memset is
       // costly, we might as well mark it as such.
@@ -471,6 +476,7 @@ class Allocator {
     // the TLS destructors, ending up in initialized thread specific data never
     // being destroyed properly. Any other heap operation will do a full init.
     initThreadMaybe(/*MinimalInit=*/true);
+    Options Options = Primary.Options.load();
 
 #ifdef GWP_ASAN_HOOKS
     if (UNLIKELY(GuardedAlloc.pointerIsMine(Ptr))) {
@@ -494,7 +500,7 @@ class Allocator {
 
     if (UNLIKELY(Header.State != Chunk::State::Allocated))
       reportInvalidChunkState(AllocatorAction::Deallocating, Ptr);
-    if (Options.DeallocTypeMismatch) {
+    if (Options.get(OptionBit::DeallocTypeMismatch)) {
       if (Header.OriginOrWasZeroed != Origin) {
         // With the exception of memalign'd chunks, that can be still be free'd.
         if (UNLIKELY(Header.OriginOrWasZeroed != Chunk::Origin::Memalign ||
@@ -505,19 +511,20 @@ class Allocator {
     }
 
     const uptr Size = getSize(Ptr, &Header);
-    if (DeleteSize && Options.DeleteSizeMismatch) {
+    if (DeleteSize && Options.get(OptionBit::DeleteSizeMismatch)) {
       if (UNLIKELY(DeleteSize != Size))
         reportDeleteSizeMismatch(Ptr, DeleteSize, Size);
     }
 
-    quarantineOrDeallocateChunk(Ptr, &Header, Size);
+    quarantineOrDeallocateChunk(Options, Ptr, &Header, Size);
   }
 
   void *reallocate(void *OldPtr, uptr NewSize, uptr Alignment = MinAlignment) {
     initThreadMaybe();
+    Options Options = Primary.Options.load();
 
     if (UNLIKELY(NewSize >= MaxAllowedMallocSize)) {
-      if (Options.MayReturnNull)
+      if (Options.get(OptionBit::MayReturnNull))
         return nullptr;
       reportAllocationSizeTooBig(NewSize, 0, MaxAllowedMallocSize);
     }
@@ -552,7 +559,7 @@ class Allocator {
     // Pointer has to be allocated with a malloc-type function. Some
     // applications think that it is OK to realloc a memalign'ed pointer, which
     // will trigger this check. It really isn't.
-    if (Options.DeallocTypeMismatch) {
+    if (Options.get(OptionBit::DeallocTypeMismatch)) {
       if (UNLIKELY(OldHeader.OriginOrWasZeroed != Chunk::Origin::Malloc))
         reportDeallocTypeMismatch(AllocatorAction::Reallocating, OldPtr,
                                   OldHeader.OriginOrWasZeroed,
@@ -583,11 +590,11 @@ class Allocator {
                    : BlockEnd - (reinterpret_cast<uptr>(OldPtr) + NewSize)) &
           Chunk::SizeOrUnusedBytesMask;
       Chunk::compareExchangeHeader(Cookie, OldPtr, &NewHeader, &OldHeader);
-      if (UNLIKELY(ClassId && useMemoryTagging())) {
+      if (UNLIKELY(ClassId && useMemoryTagging(Options))) {
         resizeTaggedChunk(reinterpret_cast<uptr>(OldTaggedPtr) + OldSize,
                           reinterpret_cast<uptr>(OldTaggedPtr) + NewSize,
                           BlockEnd);
-        storeAllocationStackMaybe(OldPtr);
+        storeAllocationStackMaybe(Options, OldPtr);
       }
       return OldTaggedPtr;
     }
@@ -601,7 +608,7 @@ class Allocator {
     if (NewPtr) {
       const uptr OldSize = getSize(OldPtr, &OldHeader);
       memcpy(NewPtr, OldTaggedPtr, Min(NewSize, OldSize));
-      quarantineOrDeallocateChunk(OldPtr, &OldHeader, OldSize);
+      quarantineOrDeallocateChunk(Options, OldPtr, &OldHeader, OldSize);
     }
     return NewPtr;
   }
@@ -682,7 +689,7 @@ class Allocator {
       if (getChunkFromBlock(Block, &Chunk, &Header) &&
           Header.State == Chunk::State::Allocated) {
         uptr TaggedChunk = Chunk;
-        if (useMemoryTagging())
+        if (useMemoryTagging(Primary.Options.load()))
           TaggedChunk = loadTag(Chunk);
         Callback(TaggedChunk, getSize(reinterpret_cast<void *>(Chunk), &Header),
                  Arg);
@@ -697,7 +704,7 @@ class Allocator {
 
   bool canReturnNull() {
     initThreadMaybe();
-    return Options.MayReturnNull;
+    return Primary.Options.load().get(OptionBit::MayReturnNull);
   }
 
   bool setOption(Option O, sptr Value) {
@@ -711,9 +718,9 @@ class Allocator {
       // any particular chunk is cut in half. Therefore we use this tuning
       // setting to control whether odd/even tags are enabled.
       if (Value == M_MEMTAG_TUNING_BUFFER_OVERFLOW)
-        Options.UseOddEvenTags = true;
+        Primary.Options.set(OptionBit::UseOddEvenTags);
       else if (Value == M_MEMTAG_TUNING_UAF)
-        Options.UseOddEvenTags = false;
+        Primary.Options.clear(OptionBit::UseOddEvenTags);
       return true;
     } else {
       // We leave it to the various sub-components to decide whether or not they
@@ -773,18 +780,26 @@ class Allocator {
            Header.State == Chunk::State::Allocated;
   }
 
-  bool useMemoryTagging() { return Primary.useMemoryTagging(); }
+  bool useMemoryTagging() const {
+    return useMemoryTagging(Primary.Options.load());
+  }
+  static bool useMemoryTagging(Options Options) {
+    return PrimaryT::useMemoryTagging(Options);
+  }
 
   void disableMemoryTagging() { Primary.disableMemoryTagging(); }
 
   void setTrackAllocationStacks(bool Track) {
     initThreadMaybe();
-    Options.TrackAllocationStacks = Track;
+    if (Track)
+      Primary.Options.set(OptionBit::TrackAllocationStacks);
+    else
+      Primary.Options.clear(OptionBit::TrackAllocationStacks);
   }
 
   void setFillContents(FillContentsMode FillContents) {
     initThreadMaybe();
-    Options.FillContents = FillContents;
+    Primary.Options.setFillContentsMode(FillContents);
   }
 
   const char *getStackDepotAddress() const {
@@ -951,16 +966,7 @@ class Allocator {
   static const uptr MaxTraceSize = 64;
 
   u32 Cookie;
-
-  struct {
-    u8 MayReturnNull : 1;       // may_return_null
-    FillContentsMode FillContents : 2; // zero_contents, pattern_fill_contents
-    u8 DeallocTypeMismatch : 1; // dealloc_type_mismatch
-    u8 DeleteSizeMismatch : 1;  // delete_size_mismatch
-    u8 TrackAllocationStacks : 1;
-    u8 UseOddEvenTags : 1;
-    u32 QuarantineMaxChunkSize; // quarantine_max_chunk_size
-  } Options;
+  u32 QuarantineMaxChunkSize;
 
   GlobalStats Stats;
   PrimaryT Primary;
@@ -1025,30 +1031,30 @@ class Allocator {
            reinterpret_cast<uptr>(Ptr) - SizeOrUnusedBytes;
   }
 
-  void quarantineOrDeallocateChunk(void *Ptr, Chunk::UnpackedHeader *Header,
-                                   uptr Size) {
+  void quarantineOrDeallocateChunk(Options Options, void *Ptr,
+                                   Chunk::UnpackedHeader *Header, uptr Size) {
     Chunk::UnpackedHeader NewHeader = *Header;
-    if (UNLIKELY(NewHeader.ClassId && useMemoryTagging())) {
+    if (UNLIKELY(NewHeader.ClassId && useMemoryTagging(Options))) {
       u8 PrevTag = extractTag(loadTag(reinterpret_cast<uptr>(Ptr)));
       if (!TSDRegistry.getDisableMemInit()) {
         uptr TaggedBegin, TaggedEnd;
         const uptr OddEvenMask = computeOddEvenMaskForPointerMaybe(
-            reinterpret_cast<uptr>(getBlockBegin(Ptr, &NewHeader)),
+            Options, reinterpret_cast<uptr>(getBlockBegin(Ptr, &NewHeader)),
             SizeClassMap::getSizeByClassId(NewHeader.ClassId));
         // Exclude the previous tag so that immediate use after free is detected
         // 100% of the time.
         setRandomTag(Ptr, Size, OddEvenMask | (1UL << PrevTag), &TaggedBegin,
                      &TaggedEnd);
       }
       NewHeader.OriginOrWasZeroed = !TSDRegistry.getDisableMemInit();
-      storeDeallocationStackMaybe(Ptr, PrevTag);
+      storeDeallocationStackMaybe(Options, Ptr, PrevTag);
     }
     // If the quarantine is disabled, the actual size of a chunk is 0 or larger
     // than the maximum allowed, we return a chunk directly to the backend.
     // Logical Or can be short-circuited, which introduces unnecessary
     // conditional jumps, so use bitwise Or and let the compiler be clever.
-    const bool BypassQuarantine = !Quarantine.getCacheSize() | !Size |
-                                  (Size > Options.QuarantineMaxChunkSize);
+    const bool BypassQuarantine =
+        !Quarantine.getCacheSize() | !Size | (Size > QuarantineMaxChunkSize);
     if (BypassQuarantine) {
       NewHeader.State = Chunk::State::Available;
       Chunk::compareExchangeHeader(Cookie, Ptr, &NewHeader, Header);
@@ -1089,16 +1095,17 @@ class Allocator {
     return Offset + Chunk::getHeaderSize();
   }
 
-  void storeAllocationStackMaybe(void *Ptr) {
-    if (!UNLIKELY(Options.TrackAllocationStacks))
+  void storeAllocationStackMaybe(Options Options, void *Ptr) {
+    if (!UNLIKELY(Options.get(OptionBit::TrackAllocationStacks)))
       return;
     auto *Ptr32 = reinterpret_cast<u32 *>(Ptr);
     Ptr32[MemTagAllocationTraceIndex] = collectStackTrace();
    Ptr32[MemTagAllocationTidIndex] = getThreadID();
   }
 
-  void storeDeallocationStackMaybe(void *Ptr, uint8_t PrevTag) {
-    if (!UNLIKELY(Options.TrackAllocationStacks))
+  void storeDeallocationStackMaybe(Options Options, void *Ptr,
+                                   uint8_t PrevTag) {
+    if (!UNLIKELY(Options.get(OptionBit::TrackAllocationStacks)))
       return;
 
     // Disable tag checks here so that we don't need to worry about zero sized
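The new options.h header introduced by this change is included but not shown in the diff. Judging purely from the call sites above (Primary.Options.set/clear/setFillContentsMode, and load() returning a value whose get() and getFillContentsMode() are queried), its interface is roughly the following. This is a minimal sketch, not the upstream header: the bit positions, the FillContents0of2/FillContents1of2 two-bit encoding, the UseMemoryTagging bit, and the use of <atomic> (Scudo standalone has its own atomic helpers) are all assumptions made for illustration.

// Hypothetical sketch of the options.h interface assumed by the diff above.
// Names are taken from the call sites; the layout is illustrative only.
#include <atomic>

typedef unsigned int u32;

enum FillContentsMode { NoFill = 0, ZeroFill = 1, PatternOrZeroFill = 2 };

enum class OptionBit {
  MayReturnNull,
  FillContents0of2, // two adjacent bits encode the FillContentsMode
  FillContents1of2,
  DeallocTypeMismatch,
  DeleteSizeMismatch,
  TrackAllocationStacks,
  UseOddEvenTags,
  UseMemoryTagging, // presumably maintained by the Primary
};

// Value type: a cheap-to-copy snapshot of all option bits, passed by value
// down the allocation/deallocation paths.
struct Options {
  u32 Val;
  bool get(OptionBit Opt) const { return Val & (1U << static_cast<u32>(Opt)); }
  FillContentsMode getFillContentsMode() const {
    return static_cast<FillContentsMode>(
        (Val >> static_cast<u32>(OptionBit::FillContents0of2)) & 3);
  }
};

// Holder type: the atomically updated bits, e.g. what Primary.Options would be.
struct AtomicOptions {
  std::atomic<u32> Val{0};
  Options load() const { return Options{Val.load(std::memory_order_relaxed)}; }
  void set(OptionBit Opt) {
    Val.fetch_or(1U << static_cast<u32>(Opt), std::memory_order_relaxed);
  }
  void clear(OptionBit Opt) {
    Val.fetch_and(~(1U << static_cast<u32>(Opt)), std::memory_order_relaxed);
  }
  void setFillContentsMode(FillContentsMode FillContents) {
    // Replace only the two fill-contents bits via a compare-exchange loop so
    // that concurrent set()/clear() calls on other bits are not lost.
    u32 Opts = Val.load(std::memory_order_relaxed), New;
    do {
      New = Opts & ~(3U << static_cast<u32>(OptionBit::FillContents0of2));
      New |= static_cast<u32>(FillContents)
             << static_cast<u32>(OptionBit::FillContents0of2);
    } while (!Val.compare_exchange_weak(Opts, New, std::memory_order_relaxed));
  }
};

The point of splitting a value type from an atomic holder is visible in the hot paths above: allocate(), deallocate() and reallocate() each perform a single atomic load per operation (Options Options = Primary.Options.load();) and then branch on plain, register-resident bits, instead of re-reading a mutable bitfield at every check the way the old Options struct did.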