 #include <vector>
 
 static constexpr scudo::Chunk::Origin Origin = scudo::Chunk::Origin::Malloc;
+static constexpr scudo::uptr MinAlignLog = FIRST_32_SECOND_64(3U, 4U);
 
 // Fuchsia complains that the function is not used.
 UNUSED static void disableDebuggerdMaybe() {
@@ -95,7 +96,12 @@ template <class Config> struct ScudoCombinedTest : public ::testing::Test {
     UseQuarantine = std::is_same<Config, scudo::AndroidConfig>::value;
     Allocator = std::make_unique<AllocatorT>();
   }
-  ~ScudoCombinedTest() { UseQuarantine = true; }
+  ~ScudoCombinedTest() {
+    Allocator->releaseToOS();
+    UseQuarantine = true;
+  }
+
+  void BasicTest(scudo::uptr SizeLogMin, scudo::uptr SizeLogMax);
 
   using AllocatorT = TestAllocator<Config>;
   std::unique_ptr<AllocatorT> Allocator;
@@ -111,7 +117,7 @@ using ScudoCombinedTestTypes = testing::Types<scudo::AndroidSvelteConfig,
 >;
 TYPED_TEST_CASE(ScudoCombinedTest, ScudoCombinedTestTypes);
 
-TYPED_TEST(ScudoCombinedTest, BasicCombined) {
+TYPED_TEST(ScudoCombinedTest, IsOwned) {
   auto *Allocator = this->Allocator.get();
   static scudo::u8 StaticBuffer[scudo::Chunk::getHeaderSize() + 1];
   EXPECT_FALSE(
@@ -123,13 +129,17 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
   EXPECT_FALSE(Allocator->isOwned(&StackBuffer[scudo::Chunk::getHeaderSize()]));
   for (scudo::uptr I = 0; I < sizeof(StackBuffer); I++)
     EXPECT_EQ(StackBuffer[I], 0x42U);
+}
 
-constexpr scudo::uptr MinAlignLog = FIRST_32_SECOND_64(3U, 4U);
+template <class Config>
+void ScudoCombinedTest<Config>::BasicTest(scudo::uptr SizeLogMin,
+                                          scudo::uptr SizeLogMax) {
+  auto *Allocator = this->Allocator.get();
 
   // This allocates and deallocates a bunch of chunks, with a wide range of
   // sizes and alignments, with a focus on sizes that could trigger weird
   // behaviors (plus or minus a small delta of a power of two for example).
-  for (scudo::uptr SizeLog = 0U; SizeLog <= 20U; SizeLog++) {
+  for (scudo::uptr SizeLog = SizeLogMin; SizeLog <= SizeLogMax; SizeLog++) {
     for (scudo::uptr AlignLog = MinAlignLog; AlignLog <= 16U; AlignLog++) {
       const scudo::uptr Align = 1U << AlignLog;
       for (scudo::sptr Delta = -32; Delta <= 32; Delta++) {
@@ -147,7 +157,15 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
       }
     }
   }
-  Allocator->releaseToOS();
+}
+
+TYPED_TEST(ScudoCombinedTest, BasicCombined0) { this->BasicTest(0, 16); }
+TYPED_TEST(ScudoCombinedTest, BasicCombined1) { this->BasicTest(17, 18); }
+TYPED_TEST(ScudoCombinedTest, BasicCombined2) { this->BasicTest(19, 19); }
+TYPED_TEST(ScudoCombinedTest, BasicCombined3) { this->BasicTest(20, 20); }
+
+TYPED_TEST(ScudoCombinedTest, ZeroContents) {
+  auto *Allocator = this->Allocator.get();
 
   // Ensure that specifying ZeroContents returns a zero'd out block.
   for (scudo::uptr SizeLog = 0U; SizeLog <= 20U; SizeLog++) {
@@ -161,7 +179,10 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
       Allocator->deallocate(P, Origin, Size);
     }
   }
-  Allocator->releaseToOS();
+}
+
+TYPED_TEST(ScudoCombinedTest, ZeroFill) {
+  auto *Allocator = this->Allocator.get();
 
   // Ensure that specifying ZeroContents returns a zero'd out block.
   Allocator->setFillContents(scudo::ZeroFill);
@@ -176,7 +197,10 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
       Allocator->deallocate(P, Origin, Size);
     }
   }
-  Allocator->releaseToOS();
+}
+
+TYPED_TEST(ScudoCombinedTest, PatternOrZeroFill) {
+  auto *Allocator = this->Allocator.get();
 
   // Ensure that specifying PatternOrZeroFill returns a pattern or zero filled
   // block. The primary allocator only produces pattern filled blocks if MTE
@@ -200,7 +224,10 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
       Allocator->deallocate(P, Origin, Size);
     }
   }
-  Allocator->releaseToOS();
+}
+
+TYPED_TEST(ScudoCombinedTest, BlockReuse) {
+  auto *Allocator = this->Allocator.get();
 
   // Verify that a chunk will end up being reused, at some point.
   const scudo::uptr NeedleSize = 1024U;
@@ -215,12 +242,14 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
     Allocator->deallocate(P, Origin);
   }
   EXPECT_TRUE(Found);
+}
 
-constexpr scudo::uptr MaxSize = TypeParam::Primary::SizeClassMap::MaxSize;
+TYPED_TEST(ScudoCombinedTest, ReallocateLarge) {
+  auto *Allocator = this->Allocator.get();
 
   // Reallocate a large chunk all the way down to a byte, verifying that we
   // preserve the data in the process.
-  scudo::uptr Size = MaxSize * 2;
+  scudo::uptr Size = TypeParam::Primary::SizeClassMap::MaxSize * 2;
   const scudo::uptr DataSize = 2048U;
   void *P = Allocator->allocate(Size, Origin);
   const char Marker = 0xab;
@@ -234,13 +263,19 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
     P = NewP;
   }
   Allocator->deallocate(P, Origin);
+}
+
+TYPED_TEST(ScudoCombinedTest, ReallocateSame) {
+  auto *Allocator = this->Allocator.get();
 
   // Check that reallocating a chunk to a slightly smaller or larger size
   // returns the same chunk. This requires that all the sizes we iterate on use
   // the same block size, but that should be the case for MaxSize - 64 with our
   // default class size maps.
-  constexpr scudo::uptr ReallocSize = MaxSize - 64;
-  P = Allocator->allocate(ReallocSize, Origin);
+  constexpr scudo::uptr ReallocSize =
+      TypeParam::Primary::SizeClassMap::MaxSize - 64;
+  void *P = Allocator->allocate(ReallocSize, Origin);
+  const char Marker = 0xab;
   memset(P, Marker, ReallocSize);
   for (scudo::sptr Delta = -32; Delta < 32; Delta += 8) {
     const scudo::uptr NewSize = ReallocSize + Delta;
@@ -251,31 +286,33 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
     checkMemoryTaggingMaybe(Allocator, NewP, NewSize, 0);
   }
   Allocator->deallocate(P, Origin);
+}
 
+TYPED_TEST(ScudoCombinedTest, IterateOverChunks) {
+  auto *Allocator = this->Allocator.get();
   // Allocates a bunch of chunks, then iterate over all the chunks, ensuring
   // they are the ones we allocated. This requires the allocator to not have any
   // other allocated chunk at this point (eg: won't work with the Quarantine).
-  if (!UseQuarantine) {
-    std::vector<void *> V;
-    for (scudo::uptr I = 0; I < 64U; I++)
-      V.push_back(Allocator->allocate(rand() % (MaxSize / 2U), Origin));
-    Allocator->disable();
-    Allocator->iterateOverChunks(
-        0U, static_cast<scudo::uptr>(SCUDO_MMAP_RANGE_SIZE - 1),
-        [](uintptr_t Base, size_t Size, void *Arg) {
-          std::vector<void *> *V = reinterpret_cast<std::vector<void *> *>(Arg);
-          void *P = reinterpret_cast<void *>(Base);
-          EXPECT_NE(std::find(V->begin(), V->end(), P), V->end());
-        },
-        reinterpret_cast<void *>(&V));
-    Allocator->enable();
-    while (!V.empty()) {
-      Allocator->deallocate(V.back(), Origin);
-      V.pop_back();
-    }
-  }
+  std::vector<void *> V;
+  for (scudo::uptr I = 0; I < 64U; I++)
+    V.push_back(Allocator->allocate(
+        rand() % (TypeParam::Primary::SizeClassMap::MaxSize / 2U), Origin));
+  Allocator->disable();
+  Allocator->iterateOverChunks(
+      0U, static_cast<scudo::uptr>(SCUDO_MMAP_RANGE_SIZE - 1),
+      [](uintptr_t Base, size_t Size, void *Arg) {
+        std::vector<void *> *V = reinterpret_cast<std::vector<void *> *>(Arg);
+        void *P = reinterpret_cast<void *>(Base);
+        EXPECT_NE(std::find(V->begin(), V->end(), P), V->end());
+      },
+      reinterpret_cast<void *>(&V));
+  Allocator->enable();
+  for (auto P : V)
+    Allocator->deallocate(P, Origin);
+}
 
-  Allocator->releaseToOS();
+TYPED_TEST(ScudoCombinedTest, UseAfterFree) {
+  auto *Allocator = this->Allocator.get();
 
   // Check that use-after-free is detected.
   for (scudo::uptr SizeLog = 0U; SizeLog <= 20U; SizeLog++) {
@@ -299,6 +336,10 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
         },
         "");
   }
+}
+
+TYPED_TEST(ScudoCombinedTest, DisableMemoryTagging) {
+  auto *Allocator = this->Allocator.get();
 
   if (Allocator->useMemoryTaggingTestOnly()) {
     // Check that disabling memory tagging works correctly.
@@ -320,6 +361,10 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
     // Re-enable them now.
     scudo::enableMemoryTagChecksTestOnly();
   }
+}
+
+TYPED_TEST(ScudoCombinedTest, Stats) {
+  auto *Allocator = this->Allocator.get();
 
   scudo::uptr BufferSize = 8192;
   std::vector<char> Buffer(BufferSize);
@@ -335,6 +380,17 @@ TYPED_TEST(ScudoCombinedTest, BasicCombined) {
   EXPECT_NE(Stats.find("Stats: SizeClassAllocator"), std::string::npos);
   EXPECT_NE(Stats.find("Stats: MapAllocator"), std::string::npos);
   EXPECT_NE(Stats.find("Stats: Quarantine"), std::string::npos);
+}
+
+TYPED_TEST(ScudoCombinedTest, CacheDrain) {
+  auto *Allocator = this->Allocator.get();
+
+  std::vector<void *> V;
+  for (scudo::uptr I = 0; I < 64U; I++)
+    V.push_back(Allocator->allocate(
+        rand() % (TypeParam::Primary::SizeClassMap::MaxSize / 2U), Origin));
+  for (auto P : V)
+    Allocator->deallocate(P, Origin);
 
   bool UnlockRequired;
   auto *TSD = Allocator->getTSDRegistry()->getTSDAndLock(&UnlockRequired);
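
For reference, the split relies on the standard googletest typed-test mechanism. Below is a minimal, self-contained sketch of that pattern, not part of the patch; ExampleTest, ConfigA, ConfigB, and RangeTest are hypothetical names standing in for ScudoCombinedTest, its config types, and BasicTest:

#include "gtest/gtest.h"

// Fixture instantiated once per config type, mirroring ScudoCombinedTest.
template <class Config> struct ExampleTest : public ::testing::Test {
  // Shared helper declared on the fixture, like ScudoCombinedTest::BasicTest,
  // taking the size-log range so the work can be split across several tests.
  void RangeTest(unsigned SizeLogMin, unsigned SizeLogMax) {
    for (unsigned SizeLog = SizeLogMin; SizeLog <= SizeLogMax; SizeLog++)
      EXPECT_LE(SizeLogMin, SizeLog); // Stand-in for the real allocation checks.
  }
};

struct ConfigA {};
struct ConfigB {};
using ExampleTypes = ::testing::Types<ConfigA, ConfigB>;
TYPED_TEST_CASE(ExampleTest, ExampleTypes); // TYPED_TEST_SUITE in newer gtest.

// One TYPED_TEST per slice, as the patch does with BasicCombined0..3, so each
// slice runs, times out, and reports independently for every config.
TYPED_TEST(ExampleTest, Range0) { this->RangeTest(0, 16); }
TYPED_TEST(ExampleTest, Range1) { this->RangeTest(17, 20); }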