Skip to content

Commit 3f37d7d

Browse files
committed
Add swift async test
1 parent 1bdb986 commit 3f37d7d

File tree

2 files changed

+72
-22
lines changed

2 files changed

+72
-22
lines changed

llvm/lib/Target/AArch64/AArch64PrologueEpilogue.cpp

Lines changed: 22 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1350,30 +1350,8 @@ void AArch64EpilogueEmitter::emitEpilogue() {
13501350
--SEHEpilogueStartI;
13511351
}
13521352

1353-
if (HasFP && AFI->hasSwiftAsyncContext())
1354-
emitSwiftAsyncContextFramePointer(EpilogueEndI, DL);
1355-
13561353
const StackOffset &SVEStackSize = AFL.getSVEStackSize(MF);
13571354

1358-
// If there is a single SP update, insert it before the ret and we're done.
1359-
if (CombineSPBump) {
1360-
assert(!SVEStackSize && "Cannot combine SP bump with SVE");
1361-
1362-
// When we are about to restore the CSRs, the CFA register is SP again.
1363-
if (EmitCFI && HasFP)
1364-
CFIInstBuilder(MBB, FirstGPRRestoreI, MachineInstr::FrameDestroy)
1365-
.buildDefCFA(AArch64::SP, NumBytes);
1366-
1367-
emitFrameOffset(MBB, MBB.getFirstTerminator(), DL, AArch64::SP, AArch64::SP,
1368-
StackOffset::getFixed(NumBytes + AfterCSRPopSize), TII,
1369-
MachineInstr::FrameDestroy, false, NeedsWinCFI, &HasWinCFI,
1370-
EmitCFI, StackOffset::getFixed(NumBytes));
1371-
return;
1372-
}
1373-
1374-
NumBytes -= PrologueSaveSize;
1375-
assert(NumBytes >= 0 && "Negative stack allocation size!?");
1376-
13771355
// Process the SVE callee-saves to determine what space needs to be
13781356
// deallocated.
13791357
StackOffset DeallocateBefore = {}, DeallocateAfter = SVEStackSize;
@@ -1397,6 +1375,28 @@ void AArch64EpilogueEmitter::emitEpilogue() {
13971375
DeallocateAfter = CalleeSavedSizeAsOffset;
13981376
}
13991377

1378+
if (HasFP && AFI->hasSwiftAsyncContext())
1379+
emitSwiftAsyncContextFramePointer(EpilogueEndI, DL);
1380+
1381+
// If there is a single SP update, insert it before the ret and we're done.
1382+
if (CombineSPBump) {
1383+
assert(!SVEStackSize && "Cannot combine SP bump with SVE");
1384+
1385+
// When we are about to restore the CSRs, the CFA register is SP again.
1386+
if (EmitCFI && HasFP)
1387+
CFIInstBuilder(MBB, FirstGPRRestoreI, MachineInstr::FrameDestroy)
1388+
.buildDefCFA(AArch64::SP, NumBytes);
1389+
1390+
emitFrameOffset(MBB, MBB.getFirstTerminator(), DL, AArch64::SP, AArch64::SP,
1391+
StackOffset::getFixed(NumBytes + AfterCSRPopSize), TII,
1392+
MachineInstr::FrameDestroy, false, NeedsWinCFI, &HasWinCFI,
1393+
EmitCFI, StackOffset::getFixed(NumBytes));
1394+
return;
1395+
}
1396+
1397+
NumBytes -= PrologueSaveSize;
1398+
assert(NumBytes >= 0 && "Negative stack allocation size!?");
1399+
14001400
// Deallocate the SVE area.
14011401
if (FPAfterSVECalleeSaves) {
14021402
// If the callee-save area is before FP, restoring the FP implicitly

llvm/test/CodeGen/AArch64/win-sve.ll

Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1560,3 +1560,53 @@ define tailcc void @f15(double %d, <vscale x 4 x i32> %vs, [9 x i64], i32 %i) {
15601560
store i32 %i, ptr %a
15611561
ret void
15621562
}
1563+
1564+
declare ptr @llvm.swift.async.context.addr()
1565+
1566+
define void @f16(ptr swiftasync %ctx, <vscale x 2 x i64> %foo) {
1567+
; CHECK-LABEL: f16:
1568+
; CHECK: .seh_proc f16
1569+
; CHECK-NEXT: // %bb.0:
1570+
; CHECK-NEXT: orr x29, x29, #0x1000000000000000
1571+
; CHECK-NEXT: .seh_nop
1572+
; CHECK-NEXT: addvl sp, sp, #-1
1573+
; CHECK-NEXT: .seh_allocz 1
1574+
; CHECK-NEXT: str z8, [sp] // 16-byte Folded Spill
1575+
; CHECK-NEXT: .seh_save_zreg z8, 0
1576+
; CHECK-NEXT: sub sp, sp, #32
1577+
; CHECK-NEXT: .seh_stackalloc 32
1578+
; CHECK-NEXT: stp x29, x30, [sp, #8] // 16-byte Folded Spill
1579+
; CHECK-NEXT: .seh_save_fplr 8
1580+
; CHECK-NEXT: str x22, [sp]
1581+
; CHECK-NEXT: .seh_nop
1582+
; CHECK-NEXT: add x29, sp, #8
1583+
; CHECK-NEXT: .seh_add_fp 8
1584+
; CHECK-NEXT: .seh_endprologue
1585+
; CHECK-NEXT: sub sp, sp, #16
1586+
; CHECK-NEXT: //APP
1587+
; CHECK-NEXT: //NO_APP
1588+
; CHECK-NEXT: ldr x8, [x22]
1589+
; CHECK-NEXT: stur x8, [x29, #-8]
1590+
; CHECK-NEXT: .seh_startepilogue
1591+
; CHECK-NEXT: add sp, sp, #16
1592+
; CHECK-NEXT: .seh_stackalloc 16
1593+
; CHECK-NEXT: ldp x29, x30, [sp, #8] // 16-byte Folded Reload
1594+
; CHECK-NEXT: add sp, sp, #32
1595+
; CHECK-NEXT: .seh_stackalloc 32
1596+
; CHECK-NEXT: .seh_save_fplr 8
1597+
; CHECK-NEXT: ldr z8, [sp] // 16-byte Folded Reload
1598+
; CHECK-NEXT: .seh_save_zreg z8, 0
1599+
; CHECK-NEXT: and x29, x29, #0xefffffffffffffff
1600+
; CHECK-NEXT: .seh_nop
1601+
; CHECK-NEXT: addvl sp, sp, #1
1602+
; CHECK-NEXT: .seh_allocz 1
1603+
; CHECK-NEXT: .seh_endepilogue
1604+
; CHECK-NEXT: ret
1605+
; CHECK-NEXT: .seh_endfunclet
1606+
; CHECK-NEXT: .seh_endproc
1607+
tail call void asm sideeffect "", "~{z8}"()
1608+
%1 = load ptr, ptr %ctx, align 8
1609+
%2 = tail call ptr @llvm.swift.async.context.addr()
1610+
store ptr %1, ptr %2, align 8
1611+
ret void
1612+
}

0 commit comments

Comments (0)