
Commit 719ab73

scudo: Make it thread-safe to set some runtime configuration flags.
Move some of the flags previously in Options, as well as the UseMemoryTagging flag previously in the primary allocator, into an atomic variable so that it can be updated while other threads are running. Relaxed accesses are used because we only have the requirement that the other threads see the new value eventually.

The code is set up so that the variable is generally loaded once per allocation function call with the exception of some rarely used code such as error handlers. The flag bits can generally stay in a register during the execution of the allocation function which means that they can be branched on with minimal overhead (e.g. TBZ on aarch64).

Differential Revision: https://reviews.llvm.org/D88523
1 parent 4fcd1a8 commit 719ab73
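
The option word itself lives in the new options.h, which is one of the six files changed by this commit but not shown in this excerpt. As a rough sketch only, written in standard C++ with std::atomic rather than scudo's own atomic helpers, and reusing the OptionBit / Options / set / clear / load names that appear in the combined.h diff below, the idea is a packed flag word that writers update with atomic read-modify-write operations and that readers snapshot with a single relaxed load:

// Illustrative sketch, not the actual scudo options.h.
#include <atomic>
#include <cstdint>

enum class OptionBit { MayReturnNull = 0, TrackAllocationStacks = 1 };

// Plain value type: a snapshot of the flag word, taken once per call.
struct Options {
  uint32_t Val;
  bool get(OptionBit O) const { return Val & (1u << static_cast<uint32_t>(O)); }
};

struct AtomicOptions {
  std::atomic<uint32_t> Val{0};
  // Relaxed ordering suffices: other threads only need to observe the new
  // value eventually, and no other data is published together with the flag.
  Options load() const { return Options{Val.load(std::memory_order_relaxed)}; }
  void set(OptionBit O) {
    Val.fetch_or(1u << static_cast<uint32_t>(O), std::memory_order_relaxed);
  }
  void clear(OptionBit O) {
    Val.fetch_and(~(1u << static_cast<uint32_t>(O)), std::memory_order_relaxed);
  }
};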

File tree

6 files changed (+168, -71 lines)


compiler-rt/lib/scudo/standalone/atomic_helpers.h

Lines changed: 14 additions & 0 deletions
@@ -89,6 +89,20 @@ inline typename T::Type atomic_fetch_sub(volatile T *A, typename T::Type V,
   return __atomic_fetch_sub(&A->ValDoNotUse, V, MO);
 }
 
+template <typename T>
+inline typename T::Type atomic_fetch_and(volatile T *A, typename T::Type V,
+                                         memory_order MO) {
+  DCHECK(!(reinterpret_cast<uptr>(A) % sizeof(*A)));
+  return __atomic_fetch_and(&A->ValDoNotUse, V, MO);
+}
+
+template <typename T>
+inline typename T::Type atomic_fetch_or(volatile T *A, typename T::Type V,
+                                        memory_order MO) {
+  DCHECK(!(reinterpret_cast<uptr>(A) % sizeof(*A)));
+  return __atomic_fetch_or(&A->ValDoNotUse, V, MO);
+}
+
 template <typename T>
 inline typename T::Type atomic_exchange(volatile T *A, typename T::Type V,
                                         memory_order MO) {
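
The two new helpers are thin wrappers around the __atomic_fetch_and and __atomic_fetch_or builtins, which is exactly what is needed to clear or set a single bit of a shared flag word. A self-contained illustration of that usage with relaxed ordering follows; the flag variable and bit position are made up for the example and are not scudo identifiers:

#include <cassert>
#include <cstdint>

int main() {
  volatile uint32_t Flags = 0;
  const uint32_t TrackStacksBit = 1u << 3; // hypothetical bit position

  // "set": OR the bit in; concurrent readers observe it eventually.
  __atomic_fetch_or(&Flags, TrackStacksBit, __ATOMIC_RELAXED);
  assert((__atomic_load_n(&Flags, __ATOMIC_RELAXED) & TrackStacksBit) != 0);

  // "clear": AND with the complement to drop only this bit.
  __atomic_fetch_and(&Flags, ~TrackStacksBit, __ATOMIC_RELAXED);
  assert((__atomic_load_n(&Flags, __ATOMIC_RELAXED) & TrackStacksBit) == 0);
  return 0;
}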

compiler-rt/lib/scudo/standalone/combined.h

Lines changed: 62 additions & 55 deletions
@@ -15,6 +15,7 @@
 #include "flags_parser.h"
 #include "local_cache.h"
 #include "memtag.h"
+#include "options.h"
 #include "quarantine.h"
 #include "report.h"
 #include "secondary.h"
@@ -144,16 +145,19 @@ class Allocator {
     reportUnrecognizedFlags();
 
     // Store some flags locally.
-    Options.MayReturnNull = getFlags()->may_return_null;
-    Options.FillContents =
-        getFlags()->zero_contents
-            ? ZeroFill
-            : (getFlags()->pattern_fill_contents ? PatternOrZeroFill : NoFill);
-    Options.DeallocTypeMismatch = getFlags()->dealloc_type_mismatch;
-    Options.DeleteSizeMismatch = getFlags()->delete_size_mismatch;
-    Options.TrackAllocationStacks = false;
-    Options.UseOddEvenTags = true;
-    Options.QuarantineMaxChunkSize =
+    if (getFlags()->may_return_null)
+      Primary.Options.set(OptionBit::MayReturnNull);
+    if (getFlags()->zero_contents)
+      Primary.Options.setFillContentsMode(ZeroFill);
+    else if (getFlags()->pattern_fill_contents)
+      Primary.Options.setFillContentsMode(PatternOrZeroFill);
+    if (getFlags()->dealloc_type_mismatch)
+      Primary.Options.set(OptionBit::DeallocTypeMismatch);
+    if (getFlags()->delete_size_mismatch)
+      Primary.Options.set(OptionBit::DeleteSizeMismatch);
+    Primary.Options.set(OptionBit::UseOddEvenTags);
+
+    QuarantineMaxChunkSize =
         static_cast<u32>(getFlags()->quarantine_max_chunk_size);
 
     Stats.initLinkerInitialized();
@@ -250,8 +254,8 @@
 #endif
   }
 
-  uptr computeOddEvenMaskForPointerMaybe(uptr Ptr, uptr Size) {
-    if (!Options.UseOddEvenTags)
+  uptr computeOddEvenMaskForPointerMaybe(Options Options, uptr Ptr, uptr Size) {
+    if (!Options.get(OptionBit::UseOddEvenTags))
       return 0;
 
     // If a chunk's tag is odd, we want the tags of the surrounding blocks to be
@@ -267,6 +271,7 @@
                           uptr Alignment = MinAlignment,
                           bool ZeroContents = false) {
     initThreadMaybe();
+    Options Options = Primary.Options.load();
 
 #ifdef GWP_ASAN_HOOKS
     if (UNLIKELY(GuardedAlloc.shouldSample())) {
@@ -278,10 +283,10 @@
     const FillContentsMode FillContents = ZeroContents ? ZeroFill
                                           : TSDRegistry.getDisableMemInit()
                                               ? NoFill
-                                              : Options.FillContents;
+                                              : Options.getFillContentsMode();
 
     if (UNLIKELY(Alignment > MaxAlignment)) {
-      if (Options.MayReturnNull)
+      if (Options.get(OptionBit::MayReturnNull))
         return nullptr;
       reportAlignmentTooBig(Alignment, MaxAlignment);
     }
@@ -300,7 +305,7 @@
     // Takes care of extravagantly large sizes as well as integer overflows.
     static_assert(MaxAllowedMallocSize < UINTPTR_MAX - MaxAlignment, "");
     if (UNLIKELY(Size >= MaxAllowedMallocSize)) {
-      if (Options.MayReturnNull)
+      if (Options.get(OptionBit::MayReturnNull))
         return nullptr;
       reportAllocationSizeTooBig(Size, NeededSize, MaxAllowedMallocSize);
     }
@@ -336,7 +341,7 @@
                                      FillContents);
 
     if (UNLIKELY(!Block)) {
-      if (Options.MayReturnNull)
+      if (Options.get(OptionBit::MayReturnNull))
         return nullptr;
       reportOutOfMemory(NeededSize);
     }
@@ -359,7 +364,7 @@
       //
       // When memory tagging is enabled, zeroing the contents is done as part of
       // setting the tag.
-      if (UNLIKELY(useMemoryTagging())) {
+      if (UNLIKELY(useMemoryTagging(Options))) {
         uptr PrevUserPtr;
         Chunk::UnpackedHeader Header;
         const uptr BlockSize = PrimaryT::getSizeByClassId(ClassId);
@@ -424,10 +429,10 @@
         }
       } else {
         const uptr OddEvenMask =
-            computeOddEvenMaskForPointerMaybe(BlockUptr, BlockSize);
+            computeOddEvenMaskForPointerMaybe(Options, BlockUptr, BlockSize);
         TaggedPtr = prepareTaggedChunk(Ptr, Size, OddEvenMask, BlockEnd);
       }
-      storeAllocationStackMaybe(Ptr);
+      storeAllocationStackMaybe(Options, Ptr);
     } else if (UNLIKELY(FillContents != NoFill)) {
       // This condition is not necessarily unlikely, but since memset is
       // costly, we might as well mark it as such.
@@ -471,6 +476,7 @@
     // the TLS destructors, ending up in initialized thread specific data never
     // being destroyed properly. Any other heap operation will do a full init.
     initThreadMaybe(/*MinimalInit=*/true);
+    Options Options = Primary.Options.load();
 
 #ifdef GWP_ASAN_HOOKS
     if (UNLIKELY(GuardedAlloc.pointerIsMine(Ptr))) {
@@ -494,7 +500,7 @@
 
     if (UNLIKELY(Header.State != Chunk::State::Allocated))
       reportInvalidChunkState(AllocatorAction::Deallocating, Ptr);
-    if (Options.DeallocTypeMismatch) {
+    if (Options.get(OptionBit::DeallocTypeMismatch)) {
       if (Header.OriginOrWasZeroed != Origin) {
         // With the exception of memalign'd chunks, that can be still be free'd.
         if (UNLIKELY(Header.OriginOrWasZeroed != Chunk::Origin::Memalign ||
@@ -505,19 +511,20 @@
     }
 
     const uptr Size = getSize(Ptr, &Header);
-    if (DeleteSize && Options.DeleteSizeMismatch) {
+    if (DeleteSize && Options.get(OptionBit::DeleteSizeMismatch)) {
       if (UNLIKELY(DeleteSize != Size))
         reportDeleteSizeMismatch(Ptr, DeleteSize, Size);
     }
 
-    quarantineOrDeallocateChunk(Ptr, &Header, Size);
+    quarantineOrDeallocateChunk(Options, Ptr, &Header, Size);
   }
 
   void *reallocate(void *OldPtr, uptr NewSize, uptr Alignment = MinAlignment) {
     initThreadMaybe();
+    Options Options = Primary.Options.load();
 
     if (UNLIKELY(NewSize >= MaxAllowedMallocSize)) {
-      if (Options.MayReturnNull)
+      if (Options.get(OptionBit::MayReturnNull))
         return nullptr;
       reportAllocationSizeTooBig(NewSize, 0, MaxAllowedMallocSize);
     }
@@ -552,7 +559,7 @@
     // Pointer has to be allocated with a malloc-type function. Some
    // applications think that it is OK to realloc a memalign'ed pointer, which
     // will trigger this check. It really isn't.
-    if (Options.DeallocTypeMismatch) {
+    if (Options.get(OptionBit::DeallocTypeMismatch)) {
       if (UNLIKELY(OldHeader.OriginOrWasZeroed != Chunk::Origin::Malloc))
         reportDeallocTypeMismatch(AllocatorAction::Reallocating, OldPtr,
                                   OldHeader.OriginOrWasZeroed,
@@ -583,11 +590,11 @@
                   : BlockEnd - (reinterpret_cast<uptr>(OldPtr) + NewSize)) &
           Chunk::SizeOrUnusedBytesMask;
       Chunk::compareExchangeHeader(Cookie, OldPtr, &NewHeader, &OldHeader);
-      if (UNLIKELY(ClassId && useMemoryTagging())) {
+      if (UNLIKELY(ClassId && useMemoryTagging(Options))) {
         resizeTaggedChunk(reinterpret_cast<uptr>(OldTaggedPtr) + OldSize,
                           reinterpret_cast<uptr>(OldTaggedPtr) + NewSize,
                           BlockEnd);
-        storeAllocationStackMaybe(OldPtr);
+        storeAllocationStackMaybe(Options, OldPtr);
       }
       return OldTaggedPtr;
     }
@@ -601,7 +608,7 @@
     if (NewPtr) {
       const uptr OldSize = getSize(OldPtr, &OldHeader);
       memcpy(NewPtr, OldTaggedPtr, Min(NewSize, OldSize));
-      quarantineOrDeallocateChunk(OldPtr, &OldHeader, OldSize);
+      quarantineOrDeallocateChunk(Options, OldPtr, &OldHeader, OldSize);
     }
     return NewPtr;
   }
@@ -682,7 +689,7 @@
       if (getChunkFromBlock(Block, &Chunk, &Header) &&
           Header.State == Chunk::State::Allocated) {
         uptr TaggedChunk = Chunk;
-        if (useMemoryTagging())
+        if (useMemoryTagging(Primary.Options.load()))
           TaggedChunk = loadTag(Chunk);
         Callback(TaggedChunk, getSize(reinterpret_cast<void *>(Chunk), &Header),
                  Arg);
@@ -697,7 +704,7 @@
 
   bool canReturnNull() {
     initThreadMaybe();
-    return Options.MayReturnNull;
+    return Primary.Options.load().get(OptionBit::MayReturnNull);
   }
 
   bool setOption(Option O, sptr Value) {
@@ -711,9 +718,9 @@
       // any particular chunk is cut in half. Therefore we use this tuning
       // setting to control whether odd/even tags are enabled.
       if (Value == M_MEMTAG_TUNING_BUFFER_OVERFLOW)
-        Options.UseOddEvenTags = true;
+        Primary.Options.set(OptionBit::UseOddEvenTags);
       else if (Value == M_MEMTAG_TUNING_UAF)
-        Options.UseOddEvenTags = false;
+        Primary.Options.clear(OptionBit::UseOddEvenTags);
       return true;
     } else {
       // We leave it to the various sub-components to decide whether or not they
@@ -773,18 +780,26 @@
            Header.State == Chunk::State::Allocated;
   }
 
-  bool useMemoryTagging() { return Primary.useMemoryTagging(); }
+  bool useMemoryTagging() const {
+    return useMemoryTagging(Primary.Options.load());
+  }
+  static bool useMemoryTagging(Options Options) {
+    return PrimaryT::useMemoryTagging(Options);
+  }
 
   void disableMemoryTagging() { Primary.disableMemoryTagging(); }
 
   void setTrackAllocationStacks(bool Track) {
     initThreadMaybe();
-    Options.TrackAllocationStacks = Track;
+    if (Track)
+      Primary.Options.set(OptionBit::TrackAllocationStacks);
+    else
+      Primary.Options.clear(OptionBit::TrackAllocationStacks);
   }
 
   void setFillContents(FillContentsMode FillContents) {
     initThreadMaybe();
-    Options.FillContents = FillContents;
+    Primary.Options.setFillContentsMode(FillContents);
   }
 
   const char *getStackDepotAddress() const {
@@ -951,16 +966,7 @@
   static const uptr MaxTraceSize = 64;
 
   u32 Cookie;
-
-  struct {
-    u8 MayReturnNull : 1; // may_return_null
-    FillContentsMode FillContents : 2; // zero_contents, pattern_fill_contents
-    u8 DeallocTypeMismatch : 1; // dealloc_type_mismatch
-    u8 DeleteSizeMismatch : 1; // delete_size_mismatch
-    u8 TrackAllocationStacks : 1;
-    u8 UseOddEvenTags : 1;
-    u32 QuarantineMaxChunkSize; // quarantine_max_chunk_size
-  } Options;
+  u32 QuarantineMaxChunkSize;
 
   GlobalStats Stats;
   PrimaryT Primary;
@@ -1025,30 +1031,30 @@
            reinterpret_cast<uptr>(Ptr) - SizeOrUnusedBytes;
   }
 
-  void quarantineOrDeallocateChunk(void *Ptr, Chunk::UnpackedHeader *Header,
-                                   uptr Size) {
+  void quarantineOrDeallocateChunk(Options Options, void *Ptr,
+                                   Chunk::UnpackedHeader *Header, uptr Size) {
     Chunk::UnpackedHeader NewHeader = *Header;
-    if (UNLIKELY(NewHeader.ClassId && useMemoryTagging())) {
+    if (UNLIKELY(NewHeader.ClassId && useMemoryTagging(Options))) {
       u8 PrevTag = extractTag(loadTag(reinterpret_cast<uptr>(Ptr)));
       if (!TSDRegistry.getDisableMemInit()) {
         uptr TaggedBegin, TaggedEnd;
         const uptr OddEvenMask = computeOddEvenMaskForPointerMaybe(
-            reinterpret_cast<uptr>(getBlockBegin(Ptr, &NewHeader)),
+            Options, reinterpret_cast<uptr>(getBlockBegin(Ptr, &NewHeader)),
             SizeClassMap::getSizeByClassId(NewHeader.ClassId));
         // Exclude the previous tag so that immediate use after free is detected
         // 100% of the time.
         setRandomTag(Ptr, Size, OddEvenMask | (1UL << PrevTag), &TaggedBegin,
                      &TaggedEnd);
       }
       NewHeader.OriginOrWasZeroed = !TSDRegistry.getDisableMemInit();
-      storeDeallocationStackMaybe(Ptr, PrevTag);
+      storeDeallocationStackMaybe(Options, Ptr, PrevTag);
     }
     // If the quarantine is disabled, the actual size of a chunk is 0 or larger
     // than the maximum allowed, we return a chunk directly to the backend.
     // Logical Or can be short-circuited, which introduces unnecessary
     // conditional jumps, so use bitwise Or and let the compiler be clever.
-    const bool BypassQuarantine = !Quarantine.getCacheSize() | !Size |
-                                  (Size > Options.QuarantineMaxChunkSize);
+    const bool BypassQuarantine =
+        !Quarantine.getCacheSize() | !Size | (Size > QuarantineMaxChunkSize);
     if (BypassQuarantine) {
       NewHeader.State = Chunk::State::Available;
       Chunk::compareExchangeHeader(Cookie, Ptr, &NewHeader, Header);
@@ -1089,16 +1095,17 @@
     return Offset + Chunk::getHeaderSize();
   }
 
-  void storeAllocationStackMaybe(void *Ptr) {
-    if (!UNLIKELY(Options.TrackAllocationStacks))
+  void storeAllocationStackMaybe(Options Options, void *Ptr) {
+    if (!UNLIKELY(Options.get(OptionBit::TrackAllocationStacks)))
       return;
     auto *Ptr32 = reinterpret_cast<u32 *>(Ptr);
     Ptr32[MemTagAllocationTraceIndex] = collectStackTrace();
     Ptr32[MemTagAllocationTidIndex] = getThreadID();
   }
 
-  void storeDeallocationStackMaybe(void *Ptr, uint8_t PrevTag) {
-    if (!UNLIKELY(Options.TrackAllocationStacks))
+  void storeDeallocationStackMaybe(Options Options, void *Ptr,
+                                   uint8_t PrevTag) {
+    if (!UNLIKELY(Options.get(OptionBit::TrackAllocationStacks)))
       return;
 
     // Disable tag checks here so that we don't need to worry about zero sized
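
Every hunk in combined.h above follows the same pattern: each public entry point takes one snapshot of the option word (Options Options = Primary.Options.load()) and then passes it down by value, so each flag check compiles to a plain bit test on a register value. A hypothetical, self-contained sketch of that shape, with none of the identifiers taken from scudo:

#include <atomic>
#include <cstddef>
#include <cstdint>
#include <new>

std::atomic<uint32_t> RuntimeFlags{0};         // hypothetical global flag word
constexpr uint32_t MayReturnNullBit = 1u << 0; // hypothetical bit assignment

void *allocateOrNull(std::size_t Size, std::size_t MaxSize) {
  // One relaxed load per entry point; the snapshot stays in a register, so
  // each check is a single bit test (e.g. TBZ/TBNZ on aarch64).
  const uint32_t Flags = RuntimeFlags.load(std::memory_order_relaxed);
  if (Size > MaxSize) {
    if (Flags & MayReturnNullBit)
      return nullptr;
    __builtin_trap(); // stand-in for the error-reporting slow path
  }
  return ::operator new(Size);
}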
