
Commit bdafe1e

vitalybuka authored and memfrob committed
[NFC][scudo] Split ScudoCombinedTest.BasicCombined
Existing implementation took up to 30 minutes to execute on my setup. Now it's more convenient to debug a single test.

Reviewed By: cryptoad

Differential Revision: https://reviews.llvm.org/D99786
1 parent 9cbdbe2 commit bdafe1e
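The mechanics of the split, in brief: the body of the old monolithic TYPED_TEST moves into a helper method on the test fixture that takes the size-class log range as parameters, and the test is re-declared as several thin TYPED_TESTs that each run one slice of that range. Below is a minimal, self-contained sketch of that pattern; the fixture, config types, and bounds are placeholder names for illustration only, not the Scudo code (the real change follows in the diff).

#include <cstdint>
#include "gtest/gtest.h"

// Typed fixture with a shared, range-parameterized helper, mirroring what the
// change does with ScudoCombinedTest::BasicTest(SizeLogMin, SizeLogMax).
template <class Config> struct SplitExampleTest : public ::testing::Test {
  void BasicTest(std::uint64_t SizeLogMin, std::uint64_t SizeLogMax) {
    for (std::uint64_t SizeLog = SizeLogMin; SizeLog <= SizeLogMax; SizeLog++)
      EXPECT_GT(std::uint64_t(1) << SizeLog, 0U); // stand-in for the real checks
  }
};

struct ConfigA {};
struct ConfigB {};
using ExampleConfigs = ::testing::Types<ConfigA, ConfigB>;
// TYPED_TEST_SUITE is the current GoogleTest spelling; the file below still
// uses the older TYPED_TEST_CASE name.
TYPED_TEST_SUITE(SplitExampleTest, ExampleConfigs);

// Each test now covers one slice of the former loop, so a slow or failing
// slice can be rerun on its own, e.g. with --gtest_filter.
TYPED_TEST(SplitExampleTest, Basic0) { this->BasicTest(0, 16); }
TYPED_TEST(SplitExampleTest, Basic1) { this->BasicTest(17, 20); }

Linked against gtest_main, each wrapper runs as an independent test case, which is what makes debugging a single range cheap compared to rerunning the whole combined test.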


compiler-rt/lib/scudo/standalone/tests/combined_test.cpp

Lines changed: 88 additions & 32 deletions
@@ -19,6 +19,7 @@
 #include <vector>
 
 static constexpr scudo::Chunk::Origin Origin = scudo::Chunk::Origin::Malloc;
+static constexpr scudo::uptr MinAlignLog = FIRST_32_SECOND_64(3U, 4U);
 
 // Fuchsia complains that the function is not used.
 UNUSED static void disableDebuggerdMaybe() {
@@ -95,7 +96,12 @@ template <class Config> struct ScudoCombinedTest : public ::testing::Test {
     UseQuarantine = std::is_same<Config, scudo::AndroidConfig>::value;
     Allocator = std::make_unique<AllocatorT>();
   }
-  ~ScudoCombinedTest() { UseQuarantine = true; }
+  ~ScudoCombinedTest() {
+    Allocator->releaseToOS();
+    UseQuarantine = true;
+  }
+
+  void BasicTest(scudo::uptr SizeLogMin, scudo::uptr SizeLogMax);
 
   using AllocatorT = TestAllocator<Config>;
   std::unique_ptr<AllocatorT> Allocator;
@@ -111,7 +117,7 @@ using ScudoCombinedTestTypes = testing::Types<scudo::AndroidSvelteConfig,
                                               >;
 TYPED_TEST_CASE(ScudoCombinedTest, ScudoCombinedTestTypes);
 
-TYPED_TEST(ScudoCombinedTest, BasicCombined) {
+TYPED_TEST(ScudoCombinedTest, IsOwned) {
   auto *Allocator = this->Allocator.get();
   static scudo::u8 StaticBuffer[scudo::Chunk::getHeaderSize() + 1];
   EXPECT_FALSE(
@@ -123,13 +129,17 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
   EXPECT_FALSE(Allocator->isOwned(&StackBuffer[scudo::Chunk::getHeaderSize()]));
   for (scudo::uptr I = 0; I < sizeof(StackBuffer); I++)
     EXPECT_EQ(StackBuffer[I], 0x42U);
+}
 
-  constexpr scudo::uptr MinAlignLog = FIRST_32_SECOND_64(3U, 4U);
+template <class Config>
+void ScudoCombinedTest<Config>::BasicTest(scudo::uptr SizeLogMin,
+                                          scudo::uptr SizeLogMax) {
+  auto *Allocator = this->Allocator.get();
 
   // This allocates and deallocates a bunch of chunks, with a wide range of
   // sizes and alignments, with a focus on sizes that could trigger weird
   // behaviors (plus or minus a small delta of a power of two for example).
-  for (scudo::uptr SizeLog = 0U; SizeLog <= 20U; SizeLog++) {
+  for (scudo::uptr SizeLog = SizeLogMin; SizeLog <= SizeLogMax; SizeLog++) {
     for (scudo::uptr AlignLog = MinAlignLog; AlignLog <= 16U; AlignLog++) {
       const scudo::uptr Align = 1U << AlignLog;
       for (scudo::sptr Delta = -32; Delta <= 32; Delta++) {
@@ -147,7 +157,15 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
       }
     }
   }
-  Allocator->releaseToOS();
+}
+
+TYPED_TEST(ScudoCombinedTest, BasicCombined0) { this->BasicTest(0, 16); }
+TYPED_TEST(ScudoCombinedTest, BasicCombined1) { this->BasicTest(17, 18); }
+TYPED_TEST(ScudoCombinedTest, BasicCombined2) { this->BasicTest(19, 19); }
+TYPED_TEST(ScudoCombinedTest, BasicCombined3) { this->BasicTest(20, 20); }
+
+TYPED_TEST(ScudoCombinedTest, ZeroContents) {
+  auto *Allocator = this->Allocator.get();
 
   // Ensure that specifying ZeroContents returns a zero'd out block.
   for (scudo::uptr SizeLog = 0U; SizeLog <= 20U; SizeLog++) {
@@ -161,7 +179,10 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
       Allocator->deallocate(P, Origin, Size);
     }
   }
-  Allocator->releaseToOS();
+}
+
+TYPED_TEST(ScudoCombinedTest, ZeroFill) {
+  auto *Allocator = this->Allocator.get();
 
   // Ensure that specifying ZeroContents returns a zero'd out block.
   Allocator->setFillContents(scudo::ZeroFill);
@@ -176,7 +197,10 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
       Allocator->deallocate(P, Origin, Size);
     }
   }
-  Allocator->releaseToOS();
+}
+
+TYPED_TEST(ScudoCombinedTest, PatternOrZeroFill) {
+  auto *Allocator = this->Allocator.get();
 
   // Ensure that specifying PatternOrZeroFill returns a pattern or zero filled
   // block. The primary allocator only produces pattern filled blocks if MTE
@@ -200,7 +224,10 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
       Allocator->deallocate(P, Origin, Size);
     }
   }
-  Allocator->releaseToOS();
+}
+
+TYPED_TEST(ScudoCombinedTest, BlockReuse) {
+  auto *Allocator = this->Allocator.get();
 
   // Verify that a chunk will end up being reused, at some point.
   const scudo::uptr NeedleSize = 1024U;
@@ -215,12 +242,14 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
     Allocator->deallocate(P, Origin);
   }
   EXPECT_TRUE(Found);
+}
 
-  constexpr scudo::uptr MaxSize = TypeParam::Primary::SizeClassMap::MaxSize;
+TYPED_TEST(ScudoCombinedTest, ReallocateLarge) {
+  auto *Allocator = this->Allocator.get();
 
   // Reallocate a large chunk all the way down to a byte, verifying that we
   // preserve the data in the process.
-  scudo::uptr Size = MaxSize * 2;
+  scudo::uptr Size = TypeParam::Primary::SizeClassMap::MaxSize * 2;
   const scudo::uptr DataSize = 2048U;
   void *P = Allocator->allocate(Size, Origin);
   const char Marker = 0xab;
@@ -234,13 +263,19 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
     P = NewP;
   }
   Allocator->deallocate(P, Origin);
+}
+
+TYPED_TEST(ScudoCombinedTest, ReallocateSame) {
+  auto *Allocator = this->Allocator.get();
 
   // Check that reallocating a chunk to a slightly smaller or larger size
   // returns the same chunk. This requires that all the sizes we iterate on use
   // the same block size, but that should be the case for MaxSize - 64 with our
   // default class size maps.
-  constexpr scudo::uptr ReallocSize = MaxSize - 64;
-  P = Allocator->allocate(ReallocSize, Origin);
+  constexpr scudo::uptr ReallocSize =
+      TypeParam::Primary::SizeClassMap::MaxSize - 64;
+  void *P = Allocator->allocate(ReallocSize, Origin);
+  const char Marker = 0xab;
   memset(P, Marker, ReallocSize);
   for (scudo::sptr Delta = -32; Delta < 32; Delta += 8) {
     const scudo::uptr NewSize = ReallocSize + Delta;
@@ -251,31 +286,33 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
     checkMemoryTaggingMaybe(Allocator, NewP, NewSize, 0);
   }
   Allocator->deallocate(P, Origin);
+}
 
+TYPED_TEST(ScudoCombinedTest, IterateOverChunks) {
+  auto *Allocator = this->Allocator.get();
   // Allocates a bunch of chunks, then iterate over all the chunks, ensuring
   // they are the ones we allocated. This requires the allocator to not have any
   // other allocated chunk at this point (eg: won't work with the Quarantine).
-  if (!UseQuarantine) {
-    std::vector<void *> V;
-    for (scudo::uptr I = 0; I < 64U; I++)
-      V.push_back(Allocator->allocate(rand() % (MaxSize / 2U), Origin));
-    Allocator->disable();
-    Allocator->iterateOverChunks(
-        0U, static_cast<scudo::uptr>(SCUDO_MMAP_RANGE_SIZE - 1),
-        [](uintptr_t Base, size_t Size, void *Arg) {
-          std::vector<void *> *V = reinterpret_cast<std::vector<void *> *>(Arg);
-          void *P = reinterpret_cast<void *>(Base);
-          EXPECT_NE(std::find(V->begin(), V->end(), P), V->end());
-        },
-        reinterpret_cast<void *>(&V));
-    Allocator->enable();
-    while (!V.empty()) {
-      Allocator->deallocate(V.back(), Origin);
-      V.pop_back();
-    }
-  }
+  std::vector<void *> V;
+  for (scudo::uptr I = 0; I < 64U; I++)
+    V.push_back(Allocator->allocate(
+        rand() % (TypeParam::Primary::SizeClassMap::MaxSize / 2U), Origin));
+  Allocator->disable();
+  Allocator->iterateOverChunks(
+      0U, static_cast<scudo::uptr>(SCUDO_MMAP_RANGE_SIZE - 1),
+      [](uintptr_t Base, size_t Size, void *Arg) {
+        std::vector<void *> *V = reinterpret_cast<std::vector<void *> *>(Arg);
+        void *P = reinterpret_cast<void *>(Base);
+        EXPECT_NE(std::find(V->begin(), V->end(), P), V->end());
+      },
+      reinterpret_cast<void *>(&V));
+  Allocator->enable();
+  for (auto P : V)
+    Allocator->deallocate(P, Origin);
+}
 
-  Allocator->releaseToOS();
+TYPED_TEST(ScudoCombinedTest, UseAfterFree) {
+  auto *Allocator = this->Allocator.get();
 
   // Check that use-after-free is detected.
   for (scudo::uptr SizeLog = 0U; SizeLog <= 20U; SizeLog++) {
@@ -299,6 +336,10 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
         },
         "");
   }
+}
+
+TYPED_TEST(ScudoCombinedTest, DisableMemoryTagging) {
+  auto *Allocator = this->Allocator.get();
 
   if (Allocator->useMemoryTaggingTestOnly()) {
     // Check that disabling memory tagging works correctly.
@@ -320,6 +361,10 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
     // Re-enable them now.
     scudo::enableMemoryTagChecksTestOnly();
   }
+}
+
+TYPED_TEST(ScudoCombinedTest, Stats) {
+  auto *Allocator = this->Allocator.get();
 
   scudo::uptr BufferSize = 8192;
   std::vector<char> Buffer(BufferSize);
@@ -335,6 +380,17 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
   EXPECT_NE(Stats.find("Stats: SizeClassAllocator"), std::string::npos);
   EXPECT_NE(Stats.find("Stats: MapAllocator"), std::string::npos);
   EXPECT_NE(Stats.find("Stats: Quarantine"), std::string::npos);
+}
+
+TYPED_TEST(ScudoCombinedTest, CacheDrain) {
+  auto *Allocator = this->Allocator.get();
+
+  std::vector<void *> V;
+  for (scudo::uptr I = 0; I < 64U; I++)
+    V.push_back(Allocator->allocate(
+        rand() % (TypeParam::Primary::SizeClassMap::MaxSize / 2U), Origin));
+  for (auto P : V)
+    Allocator->deallocate(P, Origin);
 
   bool UnlockRequired;
   auto *TSD = Allocator->getTSDRegistry()->getTSDAndLock(&UnlockRequired);
