 #include "gc/z/zAddress.inline.hpp"
 #include "gc/z/zGlobals.hpp"
 #include "gc/z/zMappedCache.hpp"
-#include "gc/z/zVirtualMemory.inline.hpp"
+#include "gc/z/zMemory.inline.hpp"
 #include "utilities/align.hpp"
 #include "utilities/globalDefinitions.hpp"

@@ -43,8 +43,8 @@ class ZMappedCacheEntry {
    const uintptr_t this_addr = reinterpret_cast<uintptr_t>(this);
    return zoffset_end(align_up(this_addr, ZGranuleSize) - ZAddressHeapBase);
  }
-  ZVirtualMemory vmem() const {
-    return ZVirtualMemory(start(), end() - start());
+  ZMemoryRange vmem() const {
+    return ZMemoryRange(start(), end() - start());
  }

  ZIntrusiveRBTreeNode* node_addr() { return &_node; }
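The rename from ZVirtualMemory to ZMemoryRange (and from extend/split to grow_from_back/split_from_front) runs through the whole file. The following is a self-contained sketch of the range semantics the call sites below assume, using plain integers instead of ZGC's zoffset/zoffset_end types; it is an illustration derived from the diff's asserts, not the real gc/z/zMemory.hpp API:

#include <cstddef>
#include <cstdint>

// Models the ZMemoryRange operations used in this diff.
class MemoryRangeSketch {
  uintptr_t _start;
  size_t    _size;

public:
  MemoryRangeSketch(uintptr_t start, size_t size)
    : _start(start), _size(size) {}

  uintptr_t start() const { return _start; }
  uintptr_t end()   const { return _start + _size; }
  size_t    size()  const { return _size; }

  // Two ranges are adjacent when one ends exactly where the other begins.
  bool adjacent_to(const MemoryRangeSketch& other) const {
    return end() == other._start || other.end() == _start;
  }

  // grow_from_back: extend the range upwards, keeping start() fixed.
  void grow_from_back(size_t size) { _size += size; }

  // split_from_front: carve off and return the first 'size' bytes,
  // leaving this range holding the remainder (same end, later start).
  MemoryRangeSketch split_from_front(size_t size) {
    const MemoryRangeSketch front(_start, size);
    _start += size;
    _size  -= size;
    return front;
  }
};

The grow/split directions match the asserts further down: grow_from_back leaves start() untouched and moves end(), while split_from_front leaves the tail (with an unchanged end()) in the cached entry.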
@@ -81,20 +81,20 @@ static void* entry_address_for_zoffset_end(zoffset_end offset) {
  return reinterpret_cast<void*>(end_addr - (cache_lines_per_entry * ZCacheLineSize) * (index + 1));
}

-static ZMappedCacheEntry* create_entry(const ZVirtualMemory& vmem) {
+static ZMappedCacheEntry* create_entry(const ZMemoryRange& vmem) {
  precond(vmem.size() >= ZGranuleSize);
  return new (entry_address_for_zoffset_end(vmem.end())) ZMappedCacheEntry(vmem.start());
}
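create_entry() stores the entry metadata inside the cached range itself, via placement new at an address computed from the range's end. A minimal sketch of that pattern (simplified, hypothetical names; the real code derives the slot with entry_address_for_zoffset_end() and cache-line alignment):

#include <cstddef>
#include <cstdint>
#include <new>

// Simplified stand-in for ZMappedCacheEntry: just the range start.
struct EntrySketch {
  uintptr_t _start;
  explicit EntrySketch(uintptr_t start) : _start(start) {}
};

// Construct the entry just below the end of the range, so the metadata
// lives in the cached memory rather than on the C++ heap. Alignment to
// cache lines (which the real code performs) is omitted for brevity.
static EntrySketch* create_entry_sketch(char* range_begin, size_t range_size) {
  char* const slot = range_begin + range_size - sizeof(EntrySketch);
  return new (slot) EntrySketch(reinterpret_cast<uintptr_t>(range_begin));
}

Keeping the bookkeeping in the cached memory avoids separate allocations, which is also why the removal paths below only run the explicit destructor (entry->~ZMappedCacheEntry()) and never free anything.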

int ZMappedCache::EntryCompare::operator()(ZIntrusiveRBTreeNode* a, ZIntrusiveRBTreeNode* b) {
-  ZVirtualMemory vmem_a = ZMappedCacheEntry::cast_to_entry(a)->vmem();
-  ZVirtualMemory vmem_b = ZMappedCacheEntry::cast_to_entry(b)->vmem();
+  ZMemoryRange vmem_a = ZMappedCacheEntry::cast_to_entry(a)->vmem();
+  ZMemoryRange vmem_b = ZMappedCacheEntry::cast_to_entry(b)->vmem();
  if (vmem_a.end() < vmem_b.start()) { return -1; }
  if (vmem_b.end() < vmem_a.start()) { return 1; }
  return 0; // Overlapping
}
int ZMappedCache::EntryCompare::operator()(zoffset key, ZIntrusiveRBTreeNode* node) {
-  ZVirtualMemory vmem = ZMappedCacheEntry::cast_to_entry(node)->vmem();
+  ZMemoryRange vmem = ZMappedCacheEntry::cast_to_entry(node)->vmem();
  if (key < vmem.start()) { return -1; }
  if (key > vmem.end()) { return 1; }
  return 0; // Containing
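Both comparators return 0 not only for identity but for any overlap (node vs. node) or containment (key vs. node), which is what lets the intrusive RB-tree answer "which cached range covers this offset" with a plain find. A minimal model of the key-vs-range rule, with plain integers (the actual tree and cursor types are ZGC-internal):

// <0: key below the range, >0: key above it, 0: key inside it. A tree
// ordered by this comparator resolves find(key) to the covering range.
static int compare_key_to_range(uintptr_t key, uintptr_t start, uintptr_t end) {
  if (key < start) { return -1; }
  if (key > end)   { return 1; }
  return 0;
}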
@@ -107,7 +107,7 @@ size_t ZMappedCache::get_size_class(size_t index) {
  return SizeClasses[index];
}

-void ZMappedCache::insert(const Tree::FindCursor& cursor, const ZVirtualMemory& vmem) {
+void ZMappedCache::insert(const Tree::FindCursor& cursor, const ZMemoryRange& vmem) {
  // Create new entry
  ZMappedCacheEntry* entry = create_entry(vmem);

@@ -127,7 +127,7 @@ void ZMappedCache::insert(const Tree::FindCursor& cursor, const ZVirtualMemory&
  }
}

-void ZMappedCache::remove(const Tree::FindCursor& cursor, const ZVirtualMemory& vmem) {
+void ZMappedCache::remove(const Tree::FindCursor& cursor, const ZMemoryRange& vmem) {
  ZIntrusiveRBTreeNode* const node = cursor.node();
  ZMappedCacheEntry* entry = ZMappedCacheEntry::cast_to_entry(node);

@@ -148,7 +148,7 @@ void ZMappedCache::remove(const Tree::FindCursor& cursor, const ZVirtualMemory&
  entry->~ZMappedCacheEntry();
}

-void ZMappedCache::replace(const Tree::FindCursor& cursor, const ZVirtualMemory& vmem) {
+void ZMappedCache::replace(const Tree::FindCursor& cursor, const ZMemoryRange& vmem) {
  // Create new entry
  ZMappedCacheEntry* entry = create_entry(vmem);

@@ -177,7 +177,7 @@ void ZMappedCache::replace(const Tree::FindCursor& cursor, const ZVirtualMemory&
  old_entry->~ZMappedCacheEntry();
}

-void ZMappedCache::update(ZMappedCacheEntry* entry, const ZVirtualMemory& vmem) {
+void ZMappedCache::update(ZMappedCacheEntry* entry, const ZMemoryRange& vmem) {
  assert(entry->end() == vmem.end(), "must be");
  // Remove or add to lists if required
  const size_t new_size = vmem.size();
@@ -205,7 +205,7 @@ void ZMappedCache::update(ZMappedCacheEntry* entry, const ZVirtualMemory& vmem)
ZMappedCache::ZMappedCache()
  : _tree(), _size_class_lists{}, _size(0), _min(_size) {}

-void ZMappedCache::insert_mapping(const ZVirtualMemory& vmem) {
+void ZMappedCache::insert_mapping(const ZMemoryRange& vmem) {
  _size += vmem.size();
  auto current_cursor = _tree.find(vmem.start());
  auto next_cursor = _tree.next(current_cursor);
@@ -214,13 +214,13 @@ void ZMappedCache::insert_mapping(const ZVirtualMemory& vmem) {
      ZMappedCacheEntry::cast_to_entry(next_cursor.node())->start() == vmem.end();
  if (extends_left && extends_right) {
    ZIntrusiveRBTreeNode* const next_node = next_cursor.node();
-    const ZVirtualMemory left_vmem = ZMappedCacheEntry::cast_to_entry(current_cursor.node())->vmem();
-    const ZVirtualMemory right_vmem = ZMappedCacheEntry::cast_to_entry(next_node)->vmem();
+    const ZMemoryRange left_vmem = ZMappedCacheEntry::cast_to_entry(current_cursor.node())->vmem();
+    const ZMemoryRange right_vmem = ZMappedCacheEntry::cast_to_entry(next_node)->vmem();
    assert(left_vmem.adjacent_to(vmem), "must be");
    assert(vmem.adjacent_to(right_vmem), "must be");
-    ZVirtualMemory new_vmem = left_vmem;
-    new_vmem.extend(vmem.size());
-    new_vmem.extend(right_vmem.size());
+    ZMemoryRange new_vmem = left_vmem;
+    new_vmem.grow_from_back(vmem.size());
+    new_vmem.grow_from_back(right_vmem.size());
    assert(new_vmem.end() == right_vmem.end(), "must be");
    assert(new_vmem.start() == left_vmem.start(), "must be");

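insert_mapping() probes the tree for cached neighbors on both sides of the new range and coalesces whatever is adjacent, so the cache always holds maximal contiguous ranges. The double-merge case above, condensed in terms of the MemoryRangeSketch type sketched earlier (assumed semantics):

// Collapse left + middle + right into one range spanning
// left.start() .. right.end(), mirroring the asserts above.
static MemoryRangeSketch merge_three(MemoryRangeSketch left,
                                     const MemoryRangeSketch& middle,
                                     const MemoryRangeSketch& right) {
  // Precondition: left.adjacent_to(middle) && middle.adjacent_to(right).
  left.grow_from_back(middle.size());
  left.grow_from_back(right.size());
  return left;
}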
@@ -232,10 +232,10 @@ void ZMappedCache::insert_mapping(const ZVirtualMemory& vmem) {
  }

  if (extends_left) {
-    const ZVirtualMemory left_vmem = ZMappedCacheEntry::cast_to_entry(current_cursor.node())->vmem();
+    const ZMemoryRange left_vmem = ZMappedCacheEntry::cast_to_entry(current_cursor.node())->vmem();
    assert(left_vmem.adjacent_to(vmem), "must be");
-    ZVirtualMemory new_vmem = left_vmem;
-    new_vmem.extend(vmem.size());
+    ZMemoryRange new_vmem = left_vmem;
+    new_vmem.grow_from_back(vmem.size());
    assert(new_vmem.end() == vmem.end(), "must be");
    assert(new_vmem.start() == left_vmem.start(), "must be");

@@ -244,10 +244,10 @@ void ZMappedCache::insert_mapping(const ZVirtualMemory& vmem) {
  }

  if (extends_right) {
-    const ZVirtualMemory right_vmem = ZMappedCacheEntry::cast_to_entry(next_cursor.node())->vmem();
+    const ZMemoryRange right_vmem = ZMappedCacheEntry::cast_to_entry(next_cursor.node())->vmem();
    assert(vmem.adjacent_to(right_vmem), "must be");
-    ZVirtualMemory new_vmem = vmem;
-    new_vmem.extend(right_vmem.size());
+    ZMemoryRange new_vmem = vmem;
+    new_vmem.grow_from_back(right_vmem.size());
    assert(new_vmem.start() == vmem.start(), "must be");
    assert(new_vmem.end() == right_vmem.end(), "must be");
    // Update next's start
@@ -260,13 +260,13 @@ void ZMappedCache::insert_mapping(const ZVirtualMemory& vmem) {
  insert(current_cursor, vmem);
}

-size_t ZMappedCache::remove_mappings(ZArray<ZVirtualMemory>* mappings, size_t size) {
+size_t ZMappedCache::remove_mappings(ZArray<ZMemoryRange>* mappings, size_t size) {
  precond(size > 0);
  precond(size % ZGranuleSize == 0);
  size_t removed = 0;
  const auto remove_mapping = [&](ZIntrusiveRBTreeNode* node) {
    ZMappedCacheEntry* entry = ZMappedCacheEntry::cast_to_entry(node);
-    ZVirtualMemory mapped_vmem = entry->vmem();
+    ZMemoryRange mapped_vmem = entry->vmem();
    size_t after_remove = removed + mapped_vmem.size();

    if (after_remove <= size) {
@@ -282,7 +282,7 @@ size_t ZMappedCache::remove_mappings(ZArray<ZVirtualMemory>* mappings, size_t si
    } else {
      const size_t uneeded = after_remove - size;
      const size_t needed = mapped_vmem.size() - uneeded;
-      const ZVirtualMemory used = mapped_vmem.split(needed);
+      const ZMemoryRange used = mapped_vmem.split_from_front(needed);
      update(entry, mapped_vmem);
      mappings->append(used);
      removed = size;
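When an entry is larger than what is still needed, only a prefix is harvested: split_from_front() hands back the front part, and mapped_vmem is left holding the tail, which update() keeps in the cache (its assert that the entry's end is unchanged is what pins down the split direction). The accounting, in sketch form (reusing MemoryRangeSketch from the earlier sketch; 'removed' and 'size' follow the diff's naming):

// Take only the bytes still missing from the request; the tail of the
// cached range stays in the cache.
static void take_partial(MemoryRangeSketch& cached, size_t& removed, size_t size) {
  const size_t unneeded = removed + cached.size() - size;
  const size_t needed   = cached.size() - unneeded;      // == size - removed
  const MemoryRangeSketch used = cached.split_from_front(needed);
  removed = size;                                        // request now satisfied
  (void)used; // handed out to the caller in the real code
}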
@@ -328,10 +328,10 @@ size_t ZMappedCache::remove_mappings(ZArray<ZVirtualMemory>* mappings, size_t si
  return removed;
}

-bool ZMappedCache::remove_mapping_contiguous(ZVirtualMemory* mapping, size_t size) {
+bool ZMappedCache::remove_mapping_contiguous(ZMemoryRange* mapping, size_t size) {
  const auto remove_mapping = [&](ZIntrusiveRBTreeNode* node) {
    ZMappedCacheEntry* entry = ZMappedCacheEntry::cast_to_entry(node);
-    ZVirtualMemory mapped_vmem = entry->vmem();
+    ZMemoryRange mapped_vmem = entry->vmem();

    if (mapped_vmem.size() == size) {
      auto cursor = _tree.get_cursor(node);
@@ -340,7 +340,7 @@ bool ZMappedCache::remove_mapping_contiguous(ZVirtualMemory* mapping, size_t siz
      *mapping = mapped_vmem;
      return true;
    } else if (mapped_vmem.size() > size) {
-      const ZVirtualMemory used = mapped_vmem.split(size);
+      const ZMemoryRange used = mapped_vmem.split_from_front(size);
      update(entry, mapped_vmem);
      *mapping = used;
      return true;
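The two branches above are the whole contiguous-allocation policy: an exact fit removes the entry outright, a larger entry donates its front and keeps its tail cached. Condensed as a sketch (same assumed MemoryRangeSketch type):

// Returns true if 'size' contiguous bytes could be taken from 'cached'.
static bool take_contiguous(MemoryRangeSketch& cached, size_t size,
                            MemoryRangeSketch* out) {
  if (cached.size() == size) {
    *out = cached;                         // exact fit: entry leaves the cache
    return true;
  }
  if (cached.size() > size) {
    *out = cached.split_from_front(size);  // front leaves, tail stays cached
    return true;
  }
  return false;                            // too small: keep searching
}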
@@ -391,7 +391,7 @@ size_t ZMappedCache::min() const {
  return _min;
}

-size_t ZMappedCache::remove_from_min(ZArray<ZVirtualMemory>* mappings, size_t max_size) {
+size_t ZMappedCache::remove_from_min(ZArray<ZMemoryRange>* mappings, size_t max_size) {
  const size_t size = MIN2(_min, max_size);
  if (size == 0) {
    return 0;