
Commit 7171a11

[GR-60085] [GR-68380] [GR-53964] [GR-68546] Use a contiguous address space for the metaspace and support heap dumping.
PullRequest: graal/21740
2 parents: 8931ba3 + b1f791f

50 files changed: +832 additions, -512 deletions


substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/AddressRangeCommittedMemoryProvider.java

Lines changed: 112 additions & 62 deletions
@@ -61,6 +61,7 @@
 import com.oracle.svm.core.locks.VMMutex;
 import com.oracle.svm.core.log.Log;
 import com.oracle.svm.core.memory.NullableNativeMemory;
+import com.oracle.svm.core.metaspace.Metaspace;
 import com.oracle.svm.core.nmt.NativeMemoryTracking;
 import com.oracle.svm.core.nmt.NmtCategory;
 import com.oracle.svm.core.os.ChunkBasedCommittedMemoryProvider;
@@ -77,7 +78,8 @@
 
 /**
  * Reserves a fixed-size address range and provides memory from it by committing and uncommitting
- * virtual memory within that range.
+ * virtual memory within that range. The address space is shared by the null regions, the
+ * {@link Metaspace}, the image heap, and the collected Java heap.
  * <p>
  * The main objective of this code is to keep external fragmentation low so that an
  * {@linkplain Isolate} is unlikely to run out of memory because its address space is exhausted. To
@@ -122,59 +124,65 @@ public class AddressRangeCommittedMemoryProvider extends ChunkBasedCommittedMemo
      */
     private final VMMutex lock = new VMMutex("freeList");
 
-    /** Contains free blocks that are large enough to fit allocations. */
+    protected UnsignedWord reservedAddressSpaceSize;
+    private Pointer metaspaceBegin;
+    private Pointer metaspaceTop;
+    private Pointer metaspaceEnd;
+    protected Pointer collectedHeapBegin;
+    protected UnsignedWord collectedHeapSize;
+
+    /**
+     * Contains free blocks for the collected Java heap that are large enough to fit allocations.
+     */
     protected FreeListNode allocListHead;
     protected long allocListCount;
 
-    /** Contains all free blocks, including small blocks that are needed for coalescing. */
+    /**
+     * Contains all free blocks for the collected Java heap, including small blocks that are needed
+     * for coalescing.
+     */
     protected FreeListNode unusedListHead;
     protected long unusedListCount;
 
-    protected UnsignedWord reservedAddressSpaceSize;
-    protected UnsignedWord reservedMetaspaceSize;
-
-    protected Pointer collectedHeapBegin;
-    protected UnsignedWord collectedHeapSize;
-
     @Platforms(Platform.HOSTED_ONLY.class)
     public AddressRangeCommittedMemoryProvider() {
         assert SubstrateOptions.SpawnIsolates.getValue();
     }
 
     @Override
     @Uninterruptible(reason = "Still being initialized.")
-    public int initialize(WordPointer heapBasePointer, IsolateArguments arguments) {
-        UnsignedWord reserved = Word.unsigned(IsolateArgumentAccess.readLong(arguments, IsolateArgumentParser.getOptionIndex(SubstrateGCOptions.ReservedAddressSpaceSize)));
-        if (reserved.equal(0)) {
+    public int initialize(WordPointer heapBaseOut, IsolateArguments arguments) {
+        UnsignedWord reservedSize = Word.unsigned(IsolateArgumentAccess.readLong(arguments, IsolateArgumentParser.getOptionIndex(SubstrateGCOptions.ReservedAddressSpaceSize)));
+        if (reservedSize.equal(0)) {
             /*
              * Reserve a 32 GB address space, except if a larger heap size was specified, or if the
              * maximum address space size is less than that.
              */
             UnsignedWord maxHeapSize = Word.unsigned(IsolateArgumentAccess.readLong(arguments, IsolateArgumentParser.getOptionIndex(SubstrateGCOptions.MaxHeapSize)));
-            reserved = UnsignedUtils.max(maxHeapSize, Word.unsigned(MIN_RESERVED_ADDRESS_SPACE_SIZE));
+            reservedSize = UnsignedUtils.max(maxHeapSize, Word.unsigned(MIN_RESERVED_ADDRESS_SPACE_SIZE));
         }
-        reserved = UnsignedUtils.min(reserved, ReferenceAccess.singleton().getMaxAddressSpaceSize());
+        reservedSize = UnsignedUtils.min(reservedSize, ReferenceAccess.singleton().getMaxAddressSpaceSize());
 
-        UnsignedWord alignment = unsigned(Heap.getHeap().getPreferredAddressSpaceAlignment());
-        WordPointer beginOut = StackValue.get(WordPointer.class);
-        int errorCode = reserveHeapMemory(reserved, alignment, arguments, beginOut);
+        UnsignedWord alignment = unsigned(Heap.getHeap().getHeapBaseAlignment());
+        WordPointer reservedBeginPtr = StackValue.get(WordPointer.class);
+        int errorCode = reserveHeapMemory(reservedSize, alignment, arguments, reservedBeginPtr);
         if (errorCode != CEntryPointErrors.NO_ERROR) {
             return errorCode;
         }
 
-        Pointer begin = beginOut.read();
+        Pointer reservedBegin = reservedBeginPtr.read();
         WordPointer imageHeapEndOut = StackValue.get(WordPointer.class);
-        errorCode = ImageHeapProvider.get().initialize(begin, reserved, heapBasePointer, imageHeapEndOut);
+        errorCode = ImageHeapProvider.get().initialize(reservedBegin, reservedSize, heapBaseOut, imageHeapEndOut);
         if (errorCode != CEntryPointErrors.NO_ERROR) {
-            freeOnInitializeError(begin, reserved);
+            freeOnInitializeError(reservedBegin, reservedSize);
            return errorCode;
         }
 
-        CEntryPointSnippets.initBaseRegisters(heapBasePointer.read());
+        CEntryPointSnippets.initBaseRegisters(heapBaseOut.read());
         WordPointer runtimeHeapBeginOut = StackValue.get(WordPointer.class);
-        errorCode = getCollectedHeapBegin(arguments, begin, reserved, imageHeapEndOut.read(), runtimeHeapBeginOut);
+        errorCode = initializeCollectedHeapBegin(arguments, reservedBegin, reservedSize, imageHeapEndOut.read(), runtimeHeapBeginOut);
         if (errorCode != CEntryPointErrors.NO_ERROR) {
-            freeOnInitializeError(begin, reserved);
+            freeOnInitializeError(reservedBegin, reservedSize);
             return errorCode;
         }
 
@@ -183,40 +191,58 @@ public int initialize(WordPointer heapBasePointer, IsolateArguments arguments) {
          * because the image heap was not initialized when we were called, so we invoke a static
          * method that loads a new reference to our instance.
         */
-        errorCode = initialize(begin, reserved, runtimeHeapBeginOut.read());
+        errorCode = initialize(reservedBegin, reservedSize, runtimeHeapBeginOut.read());
         if (errorCode != CEntryPointErrors.NO_ERROR) {
-            freeOnInitializeError(begin, reserved);
+            freeOnInitializeError(reservedBegin, reservedSize);
         }
         return errorCode;
     }
 
     @Uninterruptible(reason = "Still being initialized.")
-    protected int getCollectedHeapBegin(@SuppressWarnings("unused") IsolateArguments arguments, @SuppressWarnings("unused") Pointer begin, @SuppressWarnings("unused") UnsignedWord reserved,
-                    Pointer imageHeapEnd, WordPointer collectedHeapBeginOut) {
-        Pointer result = roundUp(imageHeapEnd, getGranularity());
-        collectedHeapBeginOut.write(result);
+    protected int initializeCollectedHeapBegin(@SuppressWarnings("unused") IsolateArguments arguments, @SuppressWarnings("unused") Pointer reservedBegin,
+                    @SuppressWarnings("unused") UnsignedWord reservedSize, Pointer imageHeapEnd, WordPointer collectedHeapBeginOut) {
+        assert PointerUtils.isAMultiple(imageHeapEnd, Word.unsigned(SubstrateOptions.getPageSize()));
+        collectedHeapBeginOut.write(imageHeapEnd);
         return CEntryPointErrors.NO_ERROR;
     }
 
     @NeverInline("Ensure a newly looked up value is used as 'this', now that the image heap is initialized")
     @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE)
-    private static int initialize(Pointer spaceBegin, UnsignedWord spaceSize, Pointer collectedHeapBegin) {
-        if (VMInspectionOptions.hasNativeMemoryTrackingSupport()) {
-            UnsignedWord imageHeapAddressSpace = ImageHeapProvider.get().getImageHeapAddressSpaceSize();
-            UnsignedWord javaHeapAddressSpace = spaceSize.subtract(imageHeapAddressSpace);
-            NativeMemoryTracking.singleton().trackReserve(javaHeapAddressSpace, NmtCategory.JavaHeap);
-        }
-
+    private static int initialize(Pointer reservedBegin, UnsignedWord reservedSize, Pointer collectedHeapBegin) {
         AddressRangeCommittedMemoryProvider provider = (AddressRangeCommittedMemoryProvider) ChunkBasedCommittedMemoryProvider.get();
-        return provider.initializeFields(spaceBegin, spaceSize, collectedHeapBegin);
+        return provider.initializeFields(reservedBegin, reservedSize, collectedHeapBegin);
     }
 
+    @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true)
     @SuppressWarnings("hiding")
+    protected int initializeFields(Pointer reservedBegin, UnsignedWord reservedSize, Pointer collectedHeapBegin) {
+        this.reservedAddressSpaceSize = reservedSize;
+
+        initializeMetaspaceFields();
+        return initializeCollectedHeapFields(reservedBegin, reservedSize, collectedHeapBegin);
+    }
+
     @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true)
-    protected int initializeFields(Pointer spaceBegin, UnsignedWord reservedSpaceSize, Pointer collectedHeapBegin) {
-        this.reservedAddressSpaceSize = reservedSpaceSize;
+    protected void initializeMetaspaceFields() {
+        int metaspaceSize = SerialAndEpsilonGCOptions.getReservedMetaspaceSize();
+        this.metaspaceBegin = KnownIntrinsics.heapBase().add(HeapImpl.getMetaspaceOffsetInAddressSpace());
+        this.metaspaceTop = metaspaceBegin;
+        this.metaspaceEnd = metaspaceTop.add(metaspaceSize);
+
+        if (VMInspectionOptions.hasNativeMemoryTrackingSupport() && metaspaceSize > 0) {
+            NativeMemoryTracking.singleton().trackReserve(metaspaceSize, NmtCategory.Metaspace);
+        }
+    }
+
+    @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true)
+    @SuppressWarnings("hiding")
+    private int initializeCollectedHeapFields(Pointer reservedBegin, UnsignedWord reservedSize, Pointer collectedHeapBegin) {
         this.collectedHeapBegin = collectedHeapBegin;
-        this.collectedHeapSize = spaceBegin.add(reservedSpaceSize).subtract(collectedHeapBegin);
+        this.collectedHeapSize = reservedSize.subtract(collectedHeapBegin.subtract(reservedBegin));
+
+        if (VMInspectionOptions.hasNativeMemoryTrackingSupport()) {
+            NativeMemoryTracking.singleton().trackReserve(collectedHeapSize, NmtCategory.JavaHeap);
+        }
 
         FreeListNode node = allocNodeOrNull(collectedHeapBegin, collectedHeapSize);
         if (node.isNull()) {
@@ -227,10 +253,20 @@ protected int initializeFields(Pointer spaceBegin, UnsignedWord reservedSpaceSiz
         this.unusedListCount = 1;
         this.allocListHead = node;
         this.allocListCount = 1;
-
         return CEntryPointErrors.NO_ERROR;
     }
 
+    @Override
+    public UnsignedWord getCollectedHeapAddressSpaceSize() {
+        return collectedHeapSize;
+    }
+
+    @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true)
+    public boolean isInMetaspace(Pointer ptr) {
+        /* Checking against begin and end does not need any locking. */
+        return ptr.aboveOrEqual(metaspaceBegin) && ptr.belowThan(metaspaceEnd);
+    }
+
     @Uninterruptible(reason = "Still being initialized.")
     protected int reserveHeapMemory(UnsignedWord reserved, UnsignedWord alignment, IsolateArguments arguments, WordPointer beginOut) {
         Pointer begin = reserveHeapMemory0(reserved, alignment, arguments);
@@ -329,29 +365,48 @@ protected int unmapAddressSpace(PointerBase heapBase) {
     }
 
     @Override
-    @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true)
+    @Uninterruptible(reason = "Locking without transition requires that the whole critical section is uninterruptible.")
     public Pointer allocateMetaspaceChunk(UnsignedWord nbytes, UnsignedWord alignment) {
-        WordPointer allocOut = UnsafeStackValue.get(WordPointer.class);
-        int error = allocateInHeapAddressSpace(nbytes, alignment, allocOut);
-        if (error == NO_ERROR) {
-            if (VMInspectionOptions.hasNativeMemoryTrackingSupport()) {
-                NativeMemoryTracking.singleton().trackCommit(nbytes, NmtCategory.Metaspace);
-            }
-            return allocOut.read();
+        lock.lockNoTransition();
+        try {
+            return allocateMetaspaceChunk0(nbytes, alignment);
+        } finally {
+            lock.unlock();
         }
-        throw reportMetaspaceChunkAllocationFailed(error);
     }
 
-    @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true)
-    protected OutOfMemoryError reportMetaspaceChunkAllocationFailed(int error) {
-        /* Explicitly don't use OutOfMemoryUtil as the metaspace is not part of the Java heap. */
-        if (error == OUT_OF_ADDRESS_SPACE) {
+    /**
+     * This method intentionally does not use {@link OutOfMemoryUtil} when reporting
+     * {@link OutOfMemoryError}s as the metaspace is not part of the Java heap.
+     */
+    @Uninterruptible(reason = "Locking without transition requires that the whole critical section is uninterruptible.")
+    private Pointer allocateMetaspaceChunk0(UnsignedWord nbytes, UnsignedWord alignment) {
+        assert lock.isOwner();
+
+        Pointer result = metaspaceTop;
+        Pointer newTop = metaspaceTop.add(nbytes);
+        assert result.isNonNull();
+        assert PointerUtils.isAMultiple(result, alignment);
+        assert UnsignedUtils.isAMultiple(newTop, alignment);
+
+        /* Check if the allocation fits into the reserved address space. */
+        if (newTop.aboveThan(metaspaceEnd)) {
             throw OUT_OF_METASPACE;
-        } else if (error == COMMIT_FAILED) {
+        }
+
+        /* Try to commit the memory. */
+        int access = VirtualMemoryProvider.Access.READ | VirtualMemoryProvider.Access.WRITE;
+        Pointer actualBegin = VirtualMemoryProvider.get().commit(result, nbytes, access);
+        if (actualBegin.isNull()) {
             throw METASPACE_CHUNK_COMMIT_FAILED;
-        } else {
-            throw VMError.shouldNotReachHereAtRuntime();
         }
+
+        /* Update top and NMT statistics. */
+        metaspaceTop = newTop;
+        if (VMInspectionOptions.hasNativeMemoryTrackingSupport()) {
+            NativeMemoryTracking.singleton().trackCommit(nbytes, NmtCategory.Metaspace);
+        }
+        return actualBegin;
    }
 
     @Override
@@ -821,11 +876,6 @@ public UnsignedWord getReservedAddressSpaceSize() {
         return reservedAddressSpaceSize;
     }
 
-    @Override
-    public UnsignedWord getReservedMetaspaceSize() {
-        return reservedMetaspaceSize;
-    }
-
     /** Keeps track of unused memory. */
     @RawStructure
     protected interface FreeListNode extends PointerBase {
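The new allocateMetaspaceChunk0 above is, in essence, a bump-pointer allocator over a pre-reserved contiguous range: check that the new top still fits below the reserved end, commit the pages, and only then advance the top; isInMetaspace can skip locking because the range bounds never change after initialization. The following standalone sketch models that pattern with plain long addresses and hypothetical names (ReservedRange, commit), with a plain synchronized method standing in for the VMMutex. It illustrates the idea only and is not the SubstrateVM API.

    /** Simplified model of a bump-pointer allocator over a reserved address range (hypothetical names). */
    final class ReservedRange {
        private final long begin;   // start of the reserved range (analogous to metaspaceBegin)
        private final long end;     // exclusive end of the reserved range (analogous to metaspaceEnd)
        private long top;           // next free address (analogous to metaspaceTop)

        ReservedRange(long begin, long size) {
            this.begin = begin;
            this.end = begin + size;
            this.top = begin;
        }

        /** Allocates nbytes from the range; fails if the reservation is exhausted or committing fails. */
        synchronized long allocateChunk(long nbytes) {
            long result = top;
            long newTop = top + nbytes;
            if (newTop > end) {
                throw new OutOfMemoryError("metaspace range exhausted");      // analogous to OUT_OF_METASPACE
            }
            if (!commit(result, nbytes)) {
                throw new OutOfMemoryError("committing metaspace chunk failed"); // analogous to METASPACE_CHUNK_COMMIT_FAILED
            }
            top = newTop;    // bump the top only once the memory is actually usable
            return result;
        }

        /** Range check needs no locking: begin and end are fixed after initialization. */
        boolean contains(long ptr) {
            return ptr >= begin && ptr < end;
        }

        /** Stand-in for committing virtual memory; always succeeds in this model. */
        private static boolean commit(long address, long nbytes) {
            return true;
        }
    }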

substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/ChunkedImageHeapLayouter.java

Lines changed: 5 additions & 3 deletions
@@ -36,6 +36,7 @@
 import com.oracle.svm.core.genscavenge.ChunkedImageHeapAllocator.Chunk;
 import com.oracle.svm.core.genscavenge.ChunkedImageHeapAllocator.UnalignedChunk;
 import com.oracle.svm.core.genscavenge.remset.RememberedSet;
+import com.oracle.svm.core.heap.Heap;
 import com.oracle.svm.core.hub.DynamicHub;
 import com.oracle.svm.core.image.ImageHeap;
 import com.oracle.svm.core.image.ImageHeapLayoutInfo;
@@ -88,6 +89,8 @@ public class ChunkedImageHeapLayouter implements ImageHeapLayouter {
     /** @param startOffset Offset relative to the heap base. */
     @SuppressWarnings("this-escape")
     public ChunkedImageHeapLayouter(ImageHeapInfo heapInfo, long startOffset) {
+        assert startOffset % Heap.getHeap().getImageHeapAlignment() == 0 : "the start of each image heap must be aligned";
+
         this.partitions = new ChunkedImageHeapPartition[PARTITION_COUNT];
         this.partitions[READ_ONLY_REGULAR] = new ChunkedImageHeapPartition("readOnly", false, false);
         this.partitions[READ_ONLY_RELOCATABLE] = new ChunkedImageHeapPartition("readOnlyRelocatable", false, false);
@@ -98,6 +101,7 @@ public ChunkedImageHeapLayouter(ImageHeapInfo heapInfo, long startOffset) {
 
         this.heapInfo = heapInfo;
         this.startOffset = startOffset;
+
         UnsignedWord alignedHeaderSize = RememberedSet.get().getHeaderSizeOfAlignedChunk();
         UnsignedWord hugeThreshold = HeapParameters.getAlignedHeapChunkSize().subtract(alignedHeaderSize);
         this.hugeObjectThreshold = hugeThreshold.rawValue();
@@ -172,7 +176,6 @@ public ImageHeapLayoutInfo layout(ImageHeap imageHeap, int pageSize, ImageHeapLa
             assert partition.getStartOffset() % objectAlignment == 0 : partition;
             assert (partition.getStartOffset() + partition.getSize()) % objectAlignment == 0 : partition;
         }
-        assert layoutInfo.getImageHeapSize() % pageSize == 0 : "Image heap size is not a multiple of page size";
         return layoutInfo;
     }
 
@@ -223,8 +226,7 @@ private ImageHeapLayoutInfo populateInfoObjects(int dynamicHubCount, int pageSiz
         long writableSize = writableEnd - offsetOfFirstWritableAlignedChunk;
         /* Aligning the end to the page size can be required for mapping into memory. */
         long imageHeapEnd = NumUtil.roundUp(getReadOnlyHuge().getStartOffset() + getReadOnlyHuge().getSize(), pageSize);
-        long imageHeapSize = imageHeapEnd - startOffset;
-        return new ImageHeapLayoutInfo(startOffset, imageHeapSize, offsetOfFirstWritableAlignedChunk, writableSize, getReadOnlyRelocatable().getStartOffset(), getReadOnlyRelocatable().getSize(),
+        return new ImageHeapLayoutInfo(startOffset, imageHeapEnd, offsetOfFirstWritableAlignedChunk, writableSize, getReadOnlyRelocatable().getStartOffset(), getReadOnlyRelocatable().getSize(),
                         getWritablePatched().getStartOffset(), getWritablePatched().getSize());
     }
 
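The layouter now passes the page-aligned imageHeapEnd (rather than a separately computed size) to ImageHeapLayoutInfo, so the remaining alignment step here is rounding the end of the last partition up to the page size so the image heap can later be memory-mapped. A minimal, self-contained illustration of that round-up arithmetic, using plain Java instead of the NumUtil helper and made-up example values:

    /** Illustrates page-size round-up as used for the image heap end (hypothetical values). */
    final class AlignmentExample {
        /** Rounds value up to the next multiple of alignment; alignment must be a power of two. */
        static long roundUp(long value, long alignment) {
            return (value + alignment - 1) & -alignment;
        }

        public static void main(String[] args) {
            long pageSize = 4096;
            long endOfLastPartition = 123_456;                     // end offset of the readOnlyHuge partition
            long imageHeapEnd = roundUp(endOfLastPartition, pageSize);
            assert imageHeapEnd % pageSize == 0;
            System.out.println(imageHeapEnd);                      // prints 126976 (31 * 4096)
        }
    }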

substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/CompactingOldGeneration.java

Lines changed: 2 additions & 1 deletion
@@ -53,6 +53,7 @@
 import com.oracle.svm.core.heap.ObjectHeader;
 import com.oracle.svm.core.heap.ObjectVisitor;
 import com.oracle.svm.core.log.Log;
+import com.oracle.svm.core.metaspace.Metaspace;
 import com.oracle.svm.core.thread.VMThreads;
 import com.oracle.svm.core.threadlocal.VMThreadLocalSupport;
 import com.oracle.svm.core.util.Timer;
@@ -361,7 +362,7 @@ private void fixupImageHeapRoots(ImageHeapInfo info) {
 
     @Uninterruptible(reason = "Avoid unnecessary safepoint checks in GC for performance.")
     private void fixupMetaspace() {
-        if (!MetaspaceImpl.isSupported()) {
+        if (!Metaspace.isSupported()) {
             return;
         }
 
substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/GCImpl.java

Lines changed: 2 additions & 1 deletion
@@ -92,6 +92,7 @@
 import com.oracle.svm.core.jfr.JfrTicks;
 import com.oracle.svm.core.jfr.events.AllocationRequiringGCEvent;
 import com.oracle.svm.core.log.Log;
+import com.oracle.svm.core.metaspace.Metaspace;
 import com.oracle.svm.core.os.ChunkBasedCommittedMemoryProvider;
 import com.oracle.svm.core.snippets.ImplicitExceptions;
 import com.oracle.svm.core.snippets.KnownIntrinsics;
@@ -968,7 +969,7 @@ private void blackenDirtyCardRoots() {
 
     @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true)
     private void blackenMetaspace() {
-        if (!MetaspaceImpl.isSupported()) {
+        if (!Metaspace.isSupported()) {
             return;
         }
 