Skip to content

Commit 963a5cb

Browse files
ckennelly authored and copybara-github committed
Annotate Span member functions as not aliasing with the memory managed by Span.
PiperOrigin-RevId: 818832638 Change-Id: I2809cbae226102ead533487aca3b46ae6247249e
1 parent 1ac4442 commit 963a5cb

File tree

2 files changed

+23
-16
lines changed

2 files changed

+23
-16
lines changed

tcmalloc/span.cc

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -145,7 +145,7 @@ void* Span::BitmapIdxToPtr(ObjIdx idx, size_t size) const {
145145
return reinterpret_cast<ObjIdx*>(off);
146146
}
147147

148-
size_t Span::BitmapPopBatch(absl::Span<void*> batch, size_t size) {
148+
size_t Span::BitmapPopBatch(absl::Span<void*> batch, size_t size) __restrict__ {
149149
size_t before = small_span_state_.bitmap.CountBits();
150150
size_t count = 0;
151151
// Want to fill the batch either with batch.size() objects, or the number of
@@ -164,7 +164,8 @@ size_t Span::BitmapPopBatch(absl::Span<void*> batch, size_t size) {
164164
return count;
165165
}
166166

167-
size_t Span::FreelistPopBatch(const absl::Span<void*> batch, size_t size) {
167+
size_t Span::FreelistPopBatch(const absl::Span<void*> batch,
168+
size_t size) __restrict__ {
168169
TC_ASSERT(!is_large_or_sampled());
169170
// Handle spans with bitmap.size() or fewer objects using a bitmap. We expect
170171
// spans to frequently hold smaller objects.
@@ -174,7 +175,8 @@ size_t Span::FreelistPopBatch(const absl::Span<void*> batch, size_t size) {
174175
return ListPopBatch(batch.data(), batch.size(), size);
175176
}
176177

177-
size_t Span::ListPopBatch(void** __restrict batch, size_t N, size_t size) {
178+
size_t Span::ListPopBatch(void** __restrict batch, size_t N,
179+
size_t size) __restrict__ {
178180
size_t result = 0;
179181

180182
// Pop from cache.
@@ -237,7 +239,7 @@ uint32_t Span::CalcReciprocal(size_t size) {
237239
return kBitmapScalingDenominator / size;
238240
}
239241

240-
void Span::BuildBitmap(size_t size, size_t count) {
242+
void Span::BuildBitmap(size_t size, size_t count) __restrict__ {
241243
// We are using a bitmap to indicate whether objects are used or not. The
242244
// maximum capacity for the bitmap is bitmap.size() objects.
243245
TC_ASSERT_LE(count, small_span_state_.bitmap.size());
@@ -249,7 +251,7 @@ void Span::BuildBitmap(size_t size, size_t count) {
249251
}
250252

251253
int Span::BuildFreelist(size_t size, size_t count, absl::Span<void*> batch,
252-
uint64_t alloc_time) {
254+
uint64_t alloc_time) __restrict__ {
253255
TC_ASSERT(!is_large_or_sampled());
254256
TC_ASSERT_GT(count, 0);
255257
freelist_ = kListEnd;

tcmalloc/span.h

Lines changed: 16 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -200,17 +200,20 @@ class ABSL_CACHELINE_ALIGNED Span final : public SpanList::Elem {
200200
//
201201
// If the freelist becomes full, we do not push the object onto the freelist.
202202
[[nodiscard]] bool FreelistPushBatch(absl::Span<void* absl_nonnull> batch,
203-
size_t size, uint32_t reciprocal);
203+
size_t size,
204+
uint32_t reciprocal) __restrict__;
204205

205206
// Pops up to N objects from the freelist and returns them in the batch array.
206207
// Returns number of objects actually popped.
207-
[[nodiscard]] size_t FreelistPopBatch(absl::Span<void*> batch, size_t size);
208+
[[nodiscard]] size_t FreelistPopBatch(absl::Span<void*> batch,
209+
size_t size) __restrict__;
208210

209211
// Initialize freelist to contain all objects in the span.
210212
// Pops up to N objects from the freelist and returns them in the batch array.
211213
// Returns number of objects actually popped.
212214
[[nodiscard]] int BuildFreelist(size_t size, size_t count,
213-
absl::Span<void*> batch, uint64_t alloc_time);
215+
absl::Span<void*> batch,
216+
uint64_t alloc_time) __restrict__;
214217

215218
// Prefetch cacheline containing most important span information.
216219
void Prefetch();
@@ -336,22 +339,23 @@ class ABSL_CACHELINE_ALIGNED Span final : public SpanList::Elem {
336339
// Helper function for converting a pointer to an index.
337340
static ObjIdx OffsetToIdx(uintptr_t offset, uint32_t reciprocal);
338341

339-
size_t ListPopBatch(void** __restrict batch, size_t N, size_t size);
342+
size_t ListPopBatch(void** __restrict batch, size_t N,
343+
size_t size) __restrict__;
340344

341-
bool ListPushBatch(absl::Span<void*> batch, size_t size);
345+
bool ListPushBatch(absl::Span<void*> batch, size_t size) __restrict__;
342346

343347
// For spans containing 64 or fewer objects, indicate that the object at the
344348
// index has been returned. Always returns true.
345349
bool BitmapPushBatch(absl::Span<void*> batch, size_t size,
346-
uint32_t reciprocal);
350+
uint32_t reciprocal) __restrict__;
347351

348352
// A bitmap is used to indicate object availability for spans containing
349353
// 64 or fewer objects.
350-
void BuildBitmap(size_t size, size_t count);
354+
void BuildBitmap(size_t size, size_t count) __restrict__;
351355

352356
// For spans with 64 or fewer objects populate batch with up to N objects.
353357
// Returns number of objects actually popped.
354-
size_t BitmapPopBatch(absl::Span<void*> batch, size_t size);
358+
size_t BitmapPopBatch(absl::Span<void*> batch, size_t size) __restrict__;
355359

356360
// Friend class to enable more indepth testing of bitmap code.
357361
friend class SpanTestPeer;
@@ -390,7 +394,7 @@ inline Span::ObjIdx Span::PtrToIdx(void* ptr, size_t size) const {
390394
}
391395

392396
inline bool Span::FreelistPushBatch(absl::Span<void*> batch, size_t size,
393-
uint32_t reciprocal) {
397+
uint32_t reciprocal) __restrict__ {
394398
TC_ASSERT(!is_large_or_sampled());
395399
const auto allocated = allocated_.load(std::memory_order_relaxed);
396400
TC_ASSERT_GE(allocated, batch.size());
@@ -406,7 +410,8 @@ inline bool Span::FreelistPushBatch(absl::Span<void*> batch, size_t size,
406410
return ListPushBatch(batch, size);
407411
}
408412

409-
inline bool Span::ListPushBatch(absl::Span<void*> batch, size_t size) {
413+
inline bool Span::ListPushBatch(absl::Span<void*> batch,
414+
size_t size) __restrict__ {
410415
if (cache_size_ < kCacheSize) {
411416
auto cache_writes = std::min(kCacheSize - cache_size_, batch.size());
412417
for (int i = 0; i < cache_writes; ++i) {
@@ -457,7 +462,7 @@ inline Span::ObjIdx Span::BitmapPtrToIdx(void* ptr, size_t size,
457462
}
458463

459464
inline bool Span::BitmapPushBatch(absl::Span<void*> batch, size_t size,
460-
uint32_t reciprocal) {
465+
uint32_t reciprocal) __restrict__ {
461466
size_t before = small_span_state_.bitmap.CountBits();
462467
for (void* ptr : batch) {
463468
// TODO(djgove) Conversions to offsets can be computed outside of lock.

0 commit comments

Comments
 (0)