@@ -406,10 +406,7 @@ class HugePageAwareAllocator final : public PageAllocatorInterface {
 #ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
   using FinalizeType = Span*;
 #else  // !TCMALLOC_INTERNAL_LEGACY_LOCKING
-  struct FinalizeType {
-    Range r;
-    bool donated = false;
-  };
+  using FinalizeType = AllocationState;
 #endif  // !TCMALLOC_INTERNAL_LEGACY_LOCKING

   FinalizeType LockAndAlloc(Length n, SpanAllocInfo span_alloc_info,
@@ -441,6 +438,7 @@ class HugePageAwareAllocator final : public PageAllocatorInterface {
   FinalizeType Finalize(Range r);

   Span* Spanify(FinalizeType f);
+  Range Unspanify(FinalizeType f);

   // Whether this HPAA should use subrelease. This delegates to the appropriate
   // parameter depending whether this is for the cold heap or another heap.
@@ -674,14 +672,16 @@ inline Span* HugePageAwareAllocator<Forwarder>::New(
     Length n, SpanAllocInfo span_alloc_info) {
   TC_CHECK_GT(n, Length(0));
   bool from_released;
-  Span* s = Spanify(LockAndAlloc(n, span_alloc_info, &from_released));
-  if (s) {
+  FinalizeType f = LockAndAlloc(n, span_alloc_info, &from_released);
+  if (f) {
+    Range r = Unspanify(f);
     // Prefetch for writing, as we anticipate using the memory soon.
-    PrefetchW(s->start_address());
+    PrefetchW(r.p.start_addr());
     if (from_released) {
-      forwarder_.Back(Range(s->first_page(), s->num_pages()));
+      forwarder_.Back(r);
     }
   }
+  Span* s = Spanify(f);
   TC_ASSERT(!s || GetMemoryTag(s->start_address()) == tag_);
   return s;
 }
@@ -726,11 +726,14 @@ inline Span* HugePageAwareAllocator<Forwarder>::NewAligned(
     PageHeapSpinLockHolder l;
     f = AllocRawHugepages(n, span_alloc_info, &from_released);
   }
-  Span* s = Spanify(f);
-  if (s && from_released) {
-    forwarder_.Back(Range(s->first_page(), s->num_pages()));
+  if (f && from_released) {
+    Range r = Unspanify(f);
+    // Prefetch for writing, as we anticipate using the memory soon.
+    PrefetchW(r.p.start_addr());
+    forwarder_.Back(r);
   }

+  Span* s = Spanify(f);
   TC_ASSERT(!s || GetMemoryTag(s->start_address()) == tag_);
   return s;
 }
@@ -752,6 +755,16 @@ inline Span* HugePageAwareAllocator<Forwarder>::Spanify(FinalizeType f) {
 #endif
 }

+template <class Forwarder>
+inline Range HugePageAwareAllocator<Forwarder>::Unspanify(FinalizeType f) {
+#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
+  TC_ASSERT(f);
+  return Range(f->first_page(), f->num_pages());
+#else
+  return f.r;
+#endif
+}
+
 template <class Forwarder>
 inline void HugePageAwareAllocator<Forwarder>::DeleteFromHugepage(
     FillerType::Tracker* pt, Range r, bool might_abandon) {
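
For readers following the Unspanify() change, here is a minimal standalone sketch of the pattern the restructured New()/NewAligned() follow: the allocating call returns a FinalizeType token instead of a Span*, the caller recovers the page Range from that token via Unspanify() to do prefetching and re-commit ("Back") accounting, and only then materializes the Span via Spanify(). Every type below (PageId, Range, Span, FinalizeType, LockAndAlloc, PrefetchW) is a simplified stand-in, not the real tcmalloc definition; in particular, the real FinalizeType is AllocationState (or Span* under TCMALLOC_INTERNAL_LEGACY_LOCKING).

// unspanify_sketch.cc -- illustrative only; build with g++ -std=c++17.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Simplified stand-ins for tcmalloc's PageId / Range / Span.
struct PageId {
  uintptr_t index = 0;
  // Hypothetical 8 KiB pages, for illustration.
  void* start_addr() const { return reinterpret_cast<void*>(index * 8192); }
};
struct Range {
  PageId p;
  size_t n = 0;  // length in pages
};
struct Span {
  Range r;
  void* start_address() const { return r.p.start_addr(); }
};

// Stand-in for the non-legacy FinalizeType (AllocationState in the PR):
// plain data that carries the allocated Range until the Span is built.
struct FinalizeType {
  Range r;
  bool valid = false;
  explicit operator bool() const { return valid; }
};

Range Unspanify(FinalizeType f) { return f.r; }

// Spanify is where the Span is actually materialized -- outside the lock.
Span* Spanify(FinalizeType f) { return f ? new Span{f.r} : nullptr; }

// Stand-in for the allocation performed under the page-heap lock.
FinalizeType LockAndAlloc(size_t n, bool* from_released) {
  *from_released = true;  // pretend the pages came from released memory
  return FinalizeType{Range{PageId{42}, n}, true};
}

void PrefetchW(void* p) { __builtin_prefetch(p, /*rw=*/1); }

// Mirrors the restructured New(): prefetch and the Back() accounting run on
// the Range recovered via Unspanify(), before any Span exists.
Span* New(size_t n) {
  bool from_released;
  FinalizeType f = LockAndAlloc(n, &from_released);
  if (f) {
    Range r = Unspanify(f);
    PrefetchW(r.p.start_addr());
    if (from_released) {
      std::printf("Back(%p, %zu pages)\n", r.p.start_addr(), r.n);
    }
  }
  return Spanify(f);
}

int main() {
  Span* s = New(4);
  assert(s != nullptr);
  std::printf("span at %p\n", s->start_address());
  delete s;
}

The point of the split is that the Range is plain data that can be handled without the Span object, so the prefetch and Back() steps no longer depend on Spanify() having run first.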