@@ -28,6 +28,10 @@ using namespace llvm;
 #define GET_INSTRINFO_CTOR_DTOR
 #include "SparcGenInstrInfo.inc"
 
+static cl::opt<unsigned> BPccDisplacementBits(
+    "sparc-bpcc-offset-bits", cl::Hidden, cl::init(19),
+    cl::desc("Restrict range of BPcc/FBPfcc instructions (DEBUG)"));
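+// Note: 19 bits matches the signed word-displacement field of the V9
+// BPcc/FBPfcc encodings; the hidden flag above only narrows the assumed
+// range so branch relaxation can be stress-tested.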
+
 // Pin the vtable to this file.
 void SparcInstrInfo::anchor() {}
@@ -73,11 +77,6 @@ unsigned SparcInstrInfo::isStoreToStackSlot(const MachineInstr &MI,
   return 0;
 }
 
-static bool IsIntegerCC(unsigned CC)
-{
-  return (CC <= SPCC::ICC_VC);
-}
-
 static SPCC::CondCodes GetOppositeBranchCondition(SPCC::CondCodes CC)
 {
   switch (CC) {
@@ -155,9 +154,7 @@ static SPCC::CondCodes GetOppositeBranchCondition(SPCC::CondCodes CC)
   llvm_unreachable("Invalid cond code");
 }
 
-static bool isUncondBranchOpcode(int Opc) {
-  return Opc == SP::BA || Opc == SP::BPA;
-}
+static bool isUncondBranchOpcode(int Opc) { return Opc == SP::BA; }
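+// (BA, the V8 always-branch, carries a 22-bit displacement; preferring it
+// over the 19-bit V9 BPA gives unconditional branches more reach.)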
 
 static bool isI32CondBranchOpcode(int Opc) {
   return Opc == SP::BCOND || Opc == SP::BPICC || Opc == SP::BPICCA ||
@@ -169,7 +166,10 @@ static bool isI64CondBranchOpcode(int Opc) {
          Opc == SP::BPXCCANT;
 }
 
-static bool isFCondBranchOpcode(int Opc) { return Opc == SP::FBCOND; }
+static bool isFCondBranchOpcode(int Opc) {
+  return Opc == SP::FBCOND || Opc == SP::FBCONDA || Opc == SP::FBCOND_V9 ||
+         Opc == SP::FBCONDA_V9;
+}
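+// The annulled (FBCONDA) and V9 (FBCOND_V9/FBCONDA_V9) forms are now
+// recognized too, so branch analysis sees every FP branch the backend emits.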
 
 static bool isCondBranchOpcode(int Opc) {
   return isI32CondBranchOpcode(Opc) || isI64CondBranchOpcode(Opc) ||
@@ -193,6 +193,34 @@ static void parseCondBranch(MachineInstr *LastInst, MachineBasicBlock *&Target,
   Target = LastInst->getOperand(0).getMBB();
 }
 
+MachineBasicBlock *
+SparcInstrInfo::getBranchDestBlock(const MachineInstr &MI) const {
+  switch (MI.getOpcode()) {
+  default:
+    llvm_unreachable("unexpected opcode!");
+  case SP::BA:
+  case SP::BCOND:
+  case SP::BCONDA:
+  case SP::FBCOND:
+  case SP::FBCONDA:
+  case SP::BPICC:
+  case SP::BPICCA:
+  case SP::BPICCNT:
+  case SP::BPICCANT:
+  case SP::BPXCC:
+  case SP::BPXCCA:
+  case SP::BPXCCNT:
+  case SP::BPXCCANT:
+  case SP::BPFCC:
+  case SP::BPFCCA:
+  case SP::BPFCCNT:
+  case SP::BPFCCANT:
+  case SP::FBCOND_V9:
+  case SP::FBCONDA_V9:
+    return MI.getOperand(0).getMBB();
+  }
+}
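+// (getBranchDestBlock is the TargetInstrInfo hook BranchRelaxation queries to
+// find a branch's target; every opcode above keeps its destination MBB in
+// operand 0, mirroring parseCondBranch.)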
+
 bool SparcInstrInfo::analyzeBranch(MachineBasicBlock &MBB,
                                    MachineBasicBlock *&TBB,
                                    MachineBasicBlock *&FBB,
@@ -285,36 +313,37 @@ unsigned SparcInstrInfo::insertBranch(MachineBasicBlock &MBB,
   assert(TBB && "insertBranch must not be told to insert a fallthrough");
   assert((Cond.size() <= 2) &&
          "Sparc branch conditions should have at most two components!");
-  assert(!BytesAdded && "code size not handled");
 
   if (Cond.empty()) {
     assert(!FBB && "Unconditional branch with multiple successors!");
-    BuildMI(&MBB, DL, get(Subtarget.isV9() ? SP::BPA : SP::BA)).addMBB(TBB);
+    BuildMI(&MBB, DL, get(SP::BA)).addMBB(TBB);
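+    // 8 bytes = a 4-byte branch plus its 4-byte delay slot; see
+    // getInstSizeInBytes below for the same conservative accounting.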
+    if (BytesAdded)
+      *BytesAdded = 8;
     return 1;
   }
 
   // Conditional branch
   unsigned Opc = Cond[0].getImm();
   unsigned CC = Cond[1].getImm();
+  BuildMI(&MBB, DL, get(Opc)).addMBB(TBB).addImm(CC);
 
-  if (IsIntegerCC(CC)) {
-    BuildMI(&MBB, DL, get(Opc)).addMBB(TBB).addImm(CC);
-  } else {
-    BuildMI(&MBB, DL, get(SP::FBCOND)).addMBB(TBB).addImm(CC);
-  }
-  if (!FBB)
+  if (!FBB) {
+    if (BytesAdded)
+      *BytesAdded = 8;
     return 1;
+  }
 
-  BuildMI(&MBB, DL, get(Subtarget.isV9() ? SP::BPA : SP::BA)).addMBB(FBB);
+  BuildMI(&MBB, DL, get(SP::BA)).addMBB(FBB);
+  if (BytesAdded)
+    *BytesAdded = 16;
   return 2;
 }
 
 unsigned SparcInstrInfo::removeBranch(MachineBasicBlock &MBB,
                                       int *BytesRemoved) const {
-  assert(!BytesRemoved && "code size not handled");
-
   MachineBasicBlock::iterator I = MBB.end();
   unsigned Count = 0;
+  int Removed = 0;
   while (I != MBB.begin()) {
     --I;
@@ -326,9 +355,13 @@ unsigned SparcInstrInfo::removeBranch(MachineBasicBlock &MBB,
       break; // Not a branch
 
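+    // Record the branch's size before erasing it; the instruction cannot be
+    // queried once eraseFromParent() has run.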
+    Removed += getInstSizeInBytes(*I);
     I->eraseFromParent();
     I = MBB.end();
     ++Count;
   }
+
+  if (BytesRemoved)
+    *BytesRemoved = Removed;
   return Count;
 }
 
@@ -340,6 +373,37 @@ bool SparcInstrInfo::reverseBranchCondition(
   return false;
 }
 
+bool SparcInstrInfo::isBranchOffsetInRange(unsigned BranchOpc,
+                                           int64_t Offset) const {
+  assert((Offset & 0b11) == 0 && "Malformed branch offset");
+  switch (BranchOpc) {
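+  // V8 Bicc/FBfcc branches encode a signed 22-bit word displacement.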
+  case SP::BA:
+  case SP::BCOND:
+  case SP::BCONDA:
+  case SP::FBCOND:
+  case SP::FBCONDA:
+    return isIntN(22, Offset >> 2);
+
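+  // V9 BPcc/FBPfcc branches encode a signed 19-bit word displacement; the
+  // effective width is adjustable via -sparc-bpcc-offset-bits for testing.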
+  case SP::BPICC:
+  case SP::BPICCA:
+  case SP::BPICCNT:
+  case SP::BPICCANT:
+  case SP::BPXCC:
+  case SP::BPXCCA:
+  case SP::BPXCCNT:
+  case SP::BPXCCANT:
+  case SP::BPFCC:
+  case SP::BPFCCA:
+  case SP::BPFCCNT:
+  case SP::BPFCCANT:
+  case SP::FBCOND_V9:
+  case SP::FBCONDA_V9:
+    return isIntN(BPccDisplacementBits, Offset >> 2);
+  }
+
+  llvm_unreachable("Unknown branch instruction!");
+}
+
 void SparcInstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator I,
                                  const DebugLoc &DL, MCRegister DestReg,
@@ -530,6 +594,23 @@ Register SparcInstrInfo::getGlobalBaseReg(MachineFunction *MF) const {
   return GlobalBaseReg;
 }
 
+unsigned SparcInstrInfo::getInstSizeInBytes(const MachineInstr &MI) const {
+  unsigned Opcode = MI.getOpcode();
+
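+  // Inline asm has no fixed encoding; fall back to the generic length
+  // estimate derived from the asm string and the target's MCAsmInfo.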
+  if (MI.isInlineAsm()) {
+    const MachineFunction *MF = MI.getParent()->getParent();
+    const char *AsmStr = MI.getOperand(0).getSymbolName();
+    return getInlineAsmLength(AsmStr, *MF->getTarget().getMCAsmInfo());
+  }
+
+  // If the instruction has a delay slot, be conservative and also include
+  // it for sizing purposes. This is done so that the BranchRelaxation pass
+  // will not mistakenly mark out-of-range branches as in-range.
+  if (MI.hasDelaySlot())
+    return get(Opcode).getSize() * 2;
+  return get(Opcode).getSize();
+}
+
 bool SparcInstrInfo::expandPostRAPseudo(MachineInstr &MI) const {
   switch (MI.getOpcode()) {
   case TargetOpcode::LOAD_STACK_GUARD: {