@@ -564,27 +564,19 @@ public SubstrateLIRGenerationResult(CompilationIdentifier compilationId, LIR lir
             super(compilationId, lir, frameMapBuilder, registerAllocationConfig, callingConvention);
             this.method = method;
 
-            /*
-             * Besides for methods with callee saved registers, we reserve additional stack space
-             * for lazyDeoptStub too. This is necessary because the lazy deopt stub might read
-             * callee-saved register values in the callee of the function to be deoptimized, thus
-             * that stack space must not be overwritten by the lazy deopt stub.
-             */
-            if (method.hasCalleeSavedRegisters() || method.getDeoptStubType() == Deoptimizer.StubType.LazyEntryStub) {
+            if (method.hasCalleeSavedRegisters()) {
                 AMD64CalleeSavedRegisters calleeSavedRegisters = AMD64CalleeSavedRegisters.singleton();
                 FrameMap frameMap = ((FrameMapBuilderTool) frameMapBuilder).getFrameMap();
                 int registerSaveAreaSizeInBytes = calleeSavedRegisters.getSaveAreaSize();
                 StackSlot calleeSaveArea = frameMap.allocateStackMemory(registerSaveAreaSizeInBytes, frameMap.getTarget().wordSize);
 
-                if (method.hasCalleeSavedRegisters()) {
-                    /*
-                     * The offset of the callee save area must be fixed early during image
-                     * generation. It is accessed when compiling methods that have a call with
-                     * callee-saved calling convention. Here we verify that offset computed earlier
-                     * is the same as the offset actually reserved.
-                     */
-                    calleeSavedRegisters.verifySaveAreaOffsetInFrame(calleeSaveArea.getRawOffset());
-                }
+                /*
+                 * The offset of the callee save area must be fixed early during image generation.
+                 * It is accessed when compiling methods that have a call with callee-saved calling
+                 * convention. Here we verify that the offset computed earlier is the same as the
+                 * offset actually reserved.
+                 */
+                calleeSavedRegisters.verifySaveAreaOffsetInFrame(calleeSaveArea.getRawOffset());
             }
 
             if (method.canDeoptimize() || method.isDeoptTarget()) {
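
As an aside on what the retained branch checks: verifySaveAreaOffsetInFrame guards the assumption that the callee save area offset, fixed early during image generation, equals the offset the frame map actually hands out later. Below is a minimal, self-contained sketch of that invariant in plain Java (this is not the Graal FrameMap/AMD64CalleeSavedRegisters API; the word size, save area size, and precomputed offset are all hypothetical):

public final class SaveAreaOffsetCheck {
    static final int WORD_SIZE = 8;                         // assumed 64-bit target
    static final int SAVE_AREA_SIZE = 2 * WORD_SIZE;        // hypothetical save area size
    static final int PRECOMPUTED_OFFSET = -SAVE_AREA_SIZE;  // "fixed early", like during image generation

    /** Toy model of FrameMap.allocateStackMemory: grow the frame downwards, aligned. */
    static int allocateStackMemory(int currentFrameSize, int sizeInBytes, int alignment) {
        int newFrameSize = (currentFrameSize + sizeInBytes + alignment - 1) & -alignment;
        return -newFrameSize; // frame-relative offset of the newly reserved slot
    }

    public static void main(String[] args) {
        int rawOffset = allocateStackMemory(0, SAVE_AREA_SIZE, WORD_SIZE);
        // Toy analogue of verifySaveAreaOffsetInFrame: fail fast if the layout drifted.
        if (rawOffset != PRECOMPUTED_OFFSET) {
            throw new AssertionError("callee save area moved: " + rawOffset + " != " + PRECOMPUTED_OFFSET);
        }
        System.out.println("callee save area at frame offset " + rawOffset);
    }
}

The fail-fast check matters because, per the comment in the hunk, compilations of callers that use the callee-saved calling convention read the offset before this frame is ever laid out.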
@@ -1344,9 +1336,8 @@ public void returned(CompilationResultBuilder crb) {
     }
 
     /**
-     * Generates the prologue of a
-     * {@link com.oracle.svm.core.deopt.Deoptimizer.StubType#EagerEntryStub} or
-     * {@link com.oracle.svm.core.deopt.Deoptimizer.StubType#LazyEntryStub} method.
+     * Generates the prologue of a {@link com.oracle.svm.core.deopt.Deoptimizer.StubType#EntryStub}
+     * method.
     */
    protected static class DeoptEntryStubContext extends SubstrateAMD64FrameContext {
        protected DeoptEntryStubContext(SharedMethod method, CallingConvention callingConvention) {
@@ -1357,33 +1348,39 @@ protected DeoptEntryStubContext(SharedMethod method, CallingConvention callingCo
         public void enter(CompilationResultBuilder tasm) {
             AMD64MacroAssembler asm = (AMD64MacroAssembler) tasm.asm;
             RegisterConfig registerConfig = tasm.frameMap.getRegisterConfig();
+            Register frameRegister = registerConfig.getFrameRegister();
             Register gpReturnReg = registerConfig.getReturnRegister(JavaKind.Long);
             Register fpReturnReg = registerConfig.getReturnRegister(JavaKind.Double);
-
             Register firstArgument = ValueUtil.asRegister(callingConvention.getArgument(0));
             assert !firstArgument.equals(gpReturnReg) : "overwriting return register";
+
             /*
              * Since this is the target for all deoptimizations we must mark the start of this
              * routine as an indirect target.
              */
             asm.maybeEmitIndirectTargetMarker();
 
-            /* Pass the address of the frame to deoptimize as first argument. */
-            asm.movq(firstArgument, registerConfig.getFrameRegister());
-
-            /* Copy the original return registers values into the argument registers. */
-            asm.movq(ValueUtil.asRegister(callingConvention.getArgument(1)), gpReturnReg);
-            asm.movdq(ValueUtil.asRegister(callingConvention.getArgument(2)), fpReturnReg);
-
             /*
-             * Keep the return address slot. This keeps the stack walkable, which is crucial for the
-             * interruptible phase of lazy deoptimization. (The return address points to the deopt
-             * stub, while the original return address is stored in the deopt slot.)
+             * Keep the return address slot. The correct return address is written in the stub
+             * itself (read more there). The original return address is stored in the deopt slot.
              *
-             * This also ensures that the stack pointer is aligned properly.
+             * Keeping this slot also ensures that the stack pointer is aligned properly.
              */
             asm.subq(registerConfig.getFrameRegister(), FrameAccess.returnAddressSize());
+
             super.enter(tasm);
+
+            /*
+             * Synthesize the parameters for the deopt stub. This needs to be done after enter() to
+             * avoid overwriting register values that it might save to the stack.
+             */
+
+            /* Pass the address of the frame to deoptimize as first argument. */
+            asm.leaq(firstArgument, new AMD64Address(frameRegister, tasm.frameMap.totalFrameSize()));
+
+            /* Copy the original return register values into the argument registers. */
+            asm.movq(ValueUtil.asRegister(callingConvention.getArgument(1)), gpReturnReg);
+            asm.movdq(ValueUtil.asRegister(callingConvention.getArgument(2)), fpReturnReg);
         }
     }
 
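
The new leaq is the subtle part of this hunk: after the prologue keeps the return address slot and super.enter() allocates the rest of the stub frame, the stack pointer has moved down by exactly totalFrameSize(), so SP + totalFrameSize() recovers the stack pointer as it was on stub entry, i.e. the address of the frame being deoptimized. A back-of-the-envelope sketch in plain Java (the concrete sizes are hypothetical, and totalFrameSize is assumed to include the return address slot, as Graal's FrameMap documents on AMD64):

public final class DeoptStubFrameAddress {
    public static void main(String[] args) {
        long spOnEntry = 0x7fff_f000L; // hypothetical SP when the stub is entered
        int returnAddressSize = 8;     // one slot, as on AMD64
        int totalFrameSize = 32;       // hypothetical stub frame size, return address slot included

        long sp = spOnEntry;
        sp -= returnAddressSize;                  // subq: keep the return address slot
        sp -= totalFrameSize - returnAddressSize; // super.enter(): allocate the rest of the stub frame

        // leaq(firstArgument, [sp + totalFrameSize]) recovers the original SP:
        long frameToDeoptimize = sp + totalFrameSize;
        if (frameToDeoptimize != spOnEntry) {
            throw new AssertionError("frame address computation is off");
        }
        System.out.printf("frame to deoptimize at 0x%x%n", frameToDeoptimize);
    }
}

This is also why the argument synthesis moves after super.enter(): per the new comment, writing the argument registers first would let enter() save already-overwritten register values to the stack.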
@@ -1392,7 +1389,7 @@ public void enter(CompilationResultBuilder tasm) {
      * method.
      *
      * Note no special handling is necessary for CFI as this will be a direct call from the
-     * {@link com.oracle.svm.core.deopt.Deoptimizer.StubType#EagerEntryStub}.
+     * {@link com.oracle.svm.core.deopt.Deoptimizer.StubType#EntryStub}.
      */
    protected static class DeoptExitStubContext extends SubstrateAMD64FrameContext {
        protected DeoptExitStubContext(SharedMethod method, CallingConvention callingConvention) {
@@ -1918,7 +1915,7 @@ protected AMD64MacroAssembler createAssembler(OptionValues options) {
     }
 
     protected FrameContext createFrameContext(SharedMethod method, Deoptimizer.StubType stubType, CallingConvention callingConvention) {
-        if (stubType == Deoptimizer.StubType.EagerEntryStub || stubType == Deoptimizer.StubType.LazyEntryStub) {
+        if (stubType == Deoptimizer.StubType.EntryStub) {
             return new DeoptEntryStubContext(method, callingConvention);
         } else if (stubType == Deoptimizer.StubType.ExitStub) {
             return new DeoptExitStubContext(method, callingConvention);
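
With the eager/lazy split removed everywhere, the dispatch collapses back to a single entry-stub case. A trimmed-down sketch of the resulting control flow (stand-in types for illustration; the real method returns FrameContext instances, not strings):

final class FrameContextDispatch {
    enum StubType { NoDeoptStub, EntryStub, ExitStub }

    static String createFrameContext(StubType stubType) {
        // Mirrors the if/else chain above: one entry case, one exit case.
        switch (stubType) {
            case EntryStub: return "DeoptEntryStubContext";
            case ExitStub:  return "DeoptExitStubContext";
            default:        return "default frame context";
        }
    }

    public static void main(String[] args) {
        System.out.println(createFrameContext(StubType.EntryStub)); // DeoptEntryStubContext
    }
}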