 #include "llvm/IR/IRBuilder.h"
 #include "llvm/IR/Instruction.h"
 #include "llvm/IR/Instructions.h"
+#include "llvm/IR/IntrinsicInst.h"
 #include "llvm/IR/IntrinsicsBPF.h"
 #include "llvm/IR/Module.h"
 #include "llvm/IR/Type.h"
@@ -478,9 +479,95 @@ static void aspaceWrapOperand(DenseMap<Value *, Value *> &Cache, Instruction *I,
   }
 }
 
+static Value *wrapPtrIfASNotZero(DenseMap<Value *, Value *> &Cache,
+                                 CallInst *CI, Value *P) {
+  if (auto *PTy = dyn_cast<PointerType>(P->getType())) {
+    if (PTy->getAddressSpace() == 0)
+      return P;
+  }
+  return aspaceWrapValue(Cache, CI->getFunction(), P);
+}
+
+static Instruction *aspaceMemSet(Intrinsic::ID ID,
+                                 DenseMap<Value *, Value *> &Cache,
+                                 CallInst *CI) {
+  auto *MI = cast<MemIntrinsic>(CI);
+  IRBuilder<> B(CI);
+
+  Value *OldDst = CI->getArgOperand(0);
+  Value *NewDst = wrapPtrIfASNotZero(Cache, CI, OldDst);
+  if (OldDst == NewDst)
+    return nullptr;
+
+  // memset(new_dst, val, len, align, isvolatile, md)
+  Value *Val = CI->getArgOperand(1);
+  Value *Len = CI->getArgOperand(2);
+
+  auto *MS = cast<MemSetInst>(CI);
+  MaybeAlign Align = MS->getDestAlign();
+  bool IsVolatile = MS->isVolatile();
+
+  if (ID == Intrinsic::memset)
+    return B.CreateMemSet(NewDst, Val, Len, Align, IsVolatile,
+                          MI->getAAMetadata());
+  else
+    return B.CreateMemSetInline(NewDst, Align, Val, Len, IsVolatile,
+                                MI->getAAMetadata());
+}
+
+static Instruction *aspaceMemCpy(Intrinsic::ID ID,
+                                 DenseMap<Value *, Value *> &Cache,
+                                 CallInst *CI) {
+  auto *MI = cast<MemIntrinsic>(CI);
+  IRBuilder<> B(CI);
+
+  Value *OldDst = CI->getArgOperand(0);
+  Value *OldSrc = CI->getArgOperand(1);
+  Value *NewDst = wrapPtrIfASNotZero(Cache, CI, OldDst);
+  Value *NewSrc = wrapPtrIfASNotZero(Cache, CI, OldSrc);
+  if (OldDst == NewDst && OldSrc == NewSrc)
+    return nullptr;
+
+  // memcpy(new_dst, dst_align, new_src, src_align, len, isvolatile, md)
+  Value *Len = CI->getArgOperand(2);
+
+  auto *MT = cast<MemTransferInst>(CI);
+  MaybeAlign DstAlign = MT->getDestAlign();
+  MaybeAlign SrcAlign = MT->getSourceAlign();
+  bool IsVolatile = MT->isVolatile();
+
+  return B.CreateMemTransferInst(ID, NewDst, DstAlign, NewSrc, SrcAlign, Len,
+                                 IsVolatile, MI->getAAMetadata());
+}
+
+static Instruction *aspaceMemMove(DenseMap<Value *, Value *> &Cache,
+                                  CallInst *CI) {
+  auto *MI = cast<MemIntrinsic>(CI);
+  IRBuilder<> B(CI);
+
+  Value *OldDst = CI->getArgOperand(0);
+  Value *OldSrc = CI->getArgOperand(1);
+  Value *NewDst = wrapPtrIfASNotZero(Cache, CI, OldDst);
+  Value *NewSrc = wrapPtrIfASNotZero(Cache, CI, OldSrc);
+  if (OldDst == NewDst && OldSrc == NewSrc)
+    return nullptr;
+
+  // memmove(new_dst, dst_align, new_src, src_align, len, isvolatile, md)
+  Value *Len = CI->getArgOperand(2);
+
+  auto *MT = cast<MemTransferInst>(CI);
+  MaybeAlign DstAlign = MT->getDestAlign();
+  MaybeAlign SrcAlign = MT->getSourceAlign();
+  bool IsVolatile = MT->isVolatile();
+
+  return B.CreateMemMove(NewDst, DstAlign, NewSrc, SrcAlign, Len, IsVolatile,
+                         MI->getAAMetadata());
+}
+
 // Support for BPF address spaces:
 // - for each function in the module M, update pointer operand of
 //   each memory access instruction (load/store/cmpxchg/atomicrmw)
+//   or intrinsic call insns (memset/memcpy/memmove)
 //   by casting it from non-zero address space to zero address space, e.g:
 //
 //   (load (ptr addrspace (N) %p) ...)
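
For illustration only (not part of the patch): assuming aspaceWrapValue materializes a plain addrspacecast for each wrapped pointer (the casts are cached per function in CastsCache and reused), the new helpers rewrite the memory intrinsics analogously. In the same notation as the comment above, for a memcpy whose pointers live in address space N:

  (call (llvm.memcpy (ptr addrspace (N) %dst) (ptr addrspace (N) %src) ...))
    -> (call (llvm.memcpy (addrspacecast ptr addrspace (N) %dst to ptr)
                          (addrspacecast ptr addrspace (N) %src to ptr) ...))

Loads, stores, cmpxchg and atomicrmw keep their pointer operand rewritten in place, while memset/memcpy/memmove calls are recreated on the cast pointers and the original intrinsic call is erased.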
@@ -493,21 +580,60 @@ bool BPFCheckAndAdjustIR::insertASpaceCasts(Module &M) {
   for (Function &F : M) {
     DenseMap<Value *, Value *> CastsCache;
     for (BasicBlock &BB : F) {
-      for (Instruction &I : BB) {
+      for (Instruction &I : llvm::make_early_inc_range(BB)) {
         unsigned PtrOpNum;
 
-        if (auto *LD = dyn_cast<LoadInst>(&I))
+        if (auto *LD = dyn_cast<LoadInst>(&I)) {
           PtrOpNum = LD->getPointerOperandIndex();
-        else if (auto *ST = dyn_cast<StoreInst>(&I))
+          aspaceWrapOperand(CastsCache, &I, PtrOpNum);
+          continue;
+        }
+        if (auto *ST = dyn_cast<StoreInst>(&I)) {
           PtrOpNum = ST->getPointerOperandIndex();
-        else if (auto *CmpXchg = dyn_cast<AtomicCmpXchgInst>(&I))
+          aspaceWrapOperand(CastsCache, &I, PtrOpNum);
+          continue;
+        }
+        if (auto *CmpXchg = dyn_cast<AtomicCmpXchgInst>(&I)) {
           PtrOpNum = CmpXchg->getPointerOperandIndex();
-        else if (auto *RMW = dyn_cast<AtomicRMWInst>(&I))
+          aspaceWrapOperand(CastsCache, &I, PtrOpNum);
+          continue;
+        }
+        if (auto *RMW = dyn_cast<AtomicRMWInst>(&I)) {
           PtrOpNum = RMW->getPointerOperandIndex();
+          aspaceWrapOperand(CastsCache, &I, PtrOpNum);
+          continue;
+        }
+
+        auto *CI = dyn_cast<CallInst>(&I);
+        if (!CI)
+          continue;
+
+        Function *Callee = CI->getCalledFunction();
+        if (!Callee || !Callee->isIntrinsic())
+          continue;
+
+        // Check memset/memcpy/memmove
+        Intrinsic::ID ID = Callee->getIntrinsicID();
+        bool IsSet = ID == Intrinsic::memset || ID == Intrinsic::memset_inline;
+        bool IsCpy = ID == Intrinsic::memcpy || ID == Intrinsic::memcpy_inline;
+        bool IsMove = ID == Intrinsic::memmove;
+        if (!IsSet && !IsCpy && !IsMove)
+          continue;
+
+        Instruction *New;
+        if (IsSet)
+          New = aspaceMemSet(ID, CastsCache, CI);
+        else if (IsCpy)
+          New = aspaceMemCpy(ID, CastsCache, CI);
         else
+          New = aspaceMemMove(CastsCache, CI);
+
+        if (!New)
           continue;
 
-        aspaceWrapOperand(CastsCache, &I, PtrOpNum);
+        I.replaceAllUsesWith(New);
+        New->takeName(&I);
+        I.eraseFromParent();
       }
     }
     Changed |= !CastsCache.empty();
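
Note on the loop change above: switching from iterating BB directly to llvm::make_early_inc_range(BB) is what makes the in-loop I.eraseFromParent() safe, because the range advances its iterator before the body runs. A minimal standalone sketch of the same pattern follows; removeDeadInsts is a hypothetical helper for illustration and is not part of this patch.

  #include "llvm/ADT/STLExtras.h"
  #include "llvm/IR/BasicBlock.h"
  #include "llvm/IR/Instruction.h"
  #include "llvm/Transforms/Utils/Local.h"

  // Hypothetical illustration: erase trivially dead instructions while
  // walking a block. Erasing the current instruction is safe only because
  // make_early_inc_range has already stepped past it.
  static void removeDeadInsts(llvm::BasicBlock &BB) {
    for (llvm::Instruction &I : llvm::make_early_inc_range(BB))
      if (llvm::isInstructionTriviallyDead(&I))
        I.eraseFromParent();
  }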