@@ -200,17 +200,20 @@ class ABSL_CACHELINE_ALIGNED Span final : public SpanList::Elem {
   //
   // If the freelist becomes full, we do not push the object onto the freelist.
   [[nodiscard]] bool FreelistPushBatch(absl::Span<void* absl_nonnull> batch,
-                                       size_t size, uint32_t reciprocal);
+                                       size_t size,
+                                       uint32_t reciprocal) __restrict__;
 
   // Pops up to N objects from the freelist and returns them in the batch array.
   // Returns number of objects actually popped.
-  [[nodiscard]] size_t FreelistPopBatch(absl::Span<void*> batch, size_t size);
+  [[nodiscard]] size_t FreelistPopBatch(absl::Span<void*> batch,
+                                        size_t size) __restrict__;
 
   // Initialize freelist to contain all objects in the span.
   // Pops up to N objects from the freelist and returns them in the batch array.
   // Returns number of objects actually popped.
   [[nodiscard]] int BuildFreelist(size_t size, size_t count,
-                                  absl::Span<void*> batch, uint64_t alloc_time);
+                                  absl::Span<void*> batch,
+                                  uint64_t alloc_time) __restrict__;
 
   // Prefetch cacheline containing most important span information.
   void Prefetch();
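
Aside: a minimal caller-side sketch (not from this commit) of the batch freelist API declared above, based only on the signatures shown in this hunk. The names `span`, `object_size`, and `kBatchSize` are hypothetical stand-ins for values the real caller would supply.

// Hypothetical caller draining objects from a span, e.g. a central free list.
constexpr size_t kBatchSize = 32;  // illustrative batch capacity
void* batch[kBatchSize];
// Pop up to kBatchSize objects; the return value says how many were filled.
size_t popped = span->FreelistPopBatch(absl::MakeSpan(batch, kBatchSize),
                                       object_size);
// batch[0..popped) now holds objects owned by the caller.
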
@@ -336,22 +339,23 @@ class ABSL_CACHELINE_ALIGNED Span final : public SpanList::Elem {
   // Helper function for converting a pointer to an index.
   static ObjIdx OffsetToIdx(uintptr_t offset, uint32_t reciprocal);
 
-  size_t ListPopBatch(void** __restrict batch, size_t N, size_t size);
+  size_t ListPopBatch(void** __restrict batch, size_t N,
+                      size_t size) __restrict__;
 
-  bool ListPushBatch(absl::Span<void*> batch, size_t size);
+  bool ListPushBatch(absl::Span<void*> batch, size_t size) __restrict__;
 
   // For spans containing 64 or fewer objects, indicate that the object at the
   // index has been returned. Always returns true.
   bool BitmapPushBatch(absl::Span<void*> batch, size_t size,
-                       uint32_t reciprocal);
+                       uint32_t reciprocal) __restrict__;
 
   // A bitmap is used to indicate object availability for spans containing
   // 64 or fewer objects.
-  void BuildBitmap(size_t size, size_t count);
+  void BuildBitmap(size_t size, size_t count) __restrict__;
 
   // For spans with 64 or fewer objects populate batch with up to N objects.
   // Returns number of objects actually popped.
-  size_t BitmapPopBatch(absl::Span<void*> batch, size_t size);
+  size_t BitmapPopBatch(absl::Span<void*> batch, size_t size) __restrict__;
 
   // Friend class to enable more indepth testing of bitmap code.
   friend class SpanTestPeer;
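
Aside: the comments above describe a bitmap representation for spans holding 64 or fewer objects. Below is a simplified, self-contained sketch (not from this commit, and not TCMalloc's internal bitmap type; all names are illustrative) of the idea, assuming a plain 64-bit word where each set bit marks a free object index.

#include <cstddef>
#include <cstdint>

// Each bit marks whether the object at that index is currently free.
struct TinyBitmapFreelist {
  uint64_t bits = 0;

  // Mark object `idx` (0..63) as free again.
  void Push(size_t idx) { bits |= uint64_t{1} << idx; }

  // Pop up to `n` free indices into `out`; returns how many were popped.
  size_t PopBatch(size_t* out, size_t n) {
    size_t popped = 0;
    while (popped < n && bits != 0) {
      size_t idx = static_cast<size_t>(__builtin_ctzll(bits));  // lowest set bit
      bits &= bits - 1;  // clear that bit
      out[popped++] = idx;
    }
    return popped;
  }
};
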
@@ -390,7 +394,7 @@ inline Span::ObjIdx Span::PtrToIdx(void* ptr, size_t size) const {
 }
 
 inline bool Span::FreelistPushBatch(absl::Span<void*> batch, size_t size,
-                                    uint32_t reciprocal) {
+                                    uint32_t reciprocal) __restrict__ {
   TC_ASSERT(!is_large_or_sampled());
   const auto allocated = allocated_.load(std::memory_order_relaxed);
   TC_ASSERT_GE(allocated, batch.size());
@@ -406,7 +410,8 @@ inline bool Span::FreelistPushBatch(absl::Span<void*> batch, size_t size,
   return ListPushBatch(batch, size);
 }
 
-inline bool Span::ListPushBatch(absl::Span<void*> batch, size_t size) {
+inline bool Span::ListPushBatch(absl::Span<void*> batch,
+                                size_t size) __restrict__ {
   if (cache_size_ < kCacheSize) {
     auto cache_writes = std::min(kCacheSize - cache_size_, batch.size());
     for (int i = 0; i < cache_writes; ++i) {
@@ -457,7 +462,7 @@ inline Span::ObjIdx Span::BitmapPtrToIdx(void* ptr, size_t size,
 }
 
 inline bool Span::BitmapPushBatch(absl::Span<void*> batch, size_t size,
-                                  uint32_t reciprocal) {
+                                  uint32_t reciprocal) __restrict__ {
   size_t before = small_span_state_.bitmap.CountBits();
   for (void* ptr : batch) {
     // TODO(djgove) Conversions to offsets can be computed outside of lock.
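
Aside: the trailing `__restrict__` added throughout this diff is the GCC/Clang extension that restrict-qualifies the implicit `this` pointer, letting the compiler assume the Span object is not aliased by the other pointers the function touches. A minimal sketch (not from this commit; the class and members are hypothetical):

struct Accum {
  int total = 0;

  // `this` is restrict-qualified: the compiler may assume *out does not alias
  // *this, so `total` can stay in a register across the stores through `out`.
  void AddTwice(int* __restrict out) __restrict__ {
    *out += total;
    *out += total;  // no reload of `total` is forced by the previous store
  }
};
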