@@ -43,7 +43,6 @@
 #include "llvm/Analysis/GlobalsModRef.h"
 #include "llvm/Analysis/Loads.h"
 #include "llvm/Analysis/PtrUseVisitor.h"
-#include "llvm/Analysis/ValueTracking.h"
 #include "llvm/Config/llvm-config.h"
 #include "llvm/IR/BasicBlock.h"
 #include "llvm/IR/Constant.h"
@@ -84,7 +83,6 @@
 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
 #include "llvm/Transforms/Utils/Local.h"
 #include "llvm/Transforms/Utils/PromoteMemToReg.h"
-#include "llvm/Transforms/Utils/SSAUpdater.h"
 #include <algorithm>
 #include <cassert>
 #include <cstddef>
@@ -248,7 +246,6 @@ class SROA {
   bool presplitLoadsAndStores(AllocaInst &AI, AllocaSlices &AS);
   AllocaInst *rewritePartition(AllocaInst &AI, AllocaSlices &AS, Partition &P);
   bool splitAlloca(AllocaInst &AI, AllocaSlices &AS);
-  bool propagateStoredValuesToLoads(AllocaInst &AI, AllocaSlices &AS);
   std::pair<bool /*Changed*/, bool /*CFGChanged*/> runOnAlloca(AllocaInst &AI);
   void clobberUse(Use &U);
   bool deleteDeadInstructions(SmallPtrSetImpl<AllocaInst *> &DeletedAllocas);
@@ -601,7 +598,6 @@ class AllocaSlices {
   /// If this is true, the slices are never fully built and should be
   /// ignored.
  bool isEscaped() const { return PointerEscapingInstr; }
-  bool isEscapedReadOnly() const { return PointerEscapingInstrReadOnly; }
 
   /// Support for iterating over the slices.
   /// @{
@@ -684,7 +680,6 @@ class AllocaSlices {
   /// store a pointer to that here and abort trying to form slices of the
   /// alloca. This will be null if the alloca slices are analyzed successfully.
   Instruction *PointerEscapingInstr;
-  Instruction *PointerEscapingInstrReadOnly;
 
   /// The slices of the alloca.
   ///
@@ -1395,26 +1390,14 @@ class AllocaSlices::SliceBuilder : public PtrUseVisitor<SliceBuilder> {
 
   /// Disable SROA entirely if there are unhandled users of the alloca.
   void visitInstruction(Instruction &I) { PI.setAborted(&I); }
-
-  void visitCallBase(CallBase &CB) {
-    // If the call operand is NoCapture ReadOnly, then we mark it as
-    // EscapedReadOnly.
-    if (CB.doesNotCapture(U->getOperandNo()) &&
-        CB.onlyReadsMemory(U->getOperandNo())) {
-      PI.setEscapedReadOnly(&CB);
-      return;
-    }
-
-    Base::visitCallBase(CB);
-  }
 };
 
 AllocaSlices::AllocaSlices(const DataLayout &DL, AllocaInst &AI)
     :
 #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
       AI(AI),
 #endif
-      PointerEscapingInstr(nullptr), PointerEscapingInstrReadOnly(nullptr) {
+      PointerEscapingInstr(nullptr) {
   SliceBuilder PB(DL, AI, *this);
   SliceBuilder::PtrInfo PtrI = PB.visitPtr(AI);
   if (PtrI.isEscaped() || PtrI.isAborted()) {
@@ -1425,7 +1408,6 @@ AllocaSlices::AllocaSlices(const DataLayout &DL, AllocaInst &AI)
     assert(PointerEscapingInstr && "Did not track a bad instruction");
     return;
   }
-  PointerEscapingInstrReadOnly = PtrI.getEscapedReadOnlyInst();
 
   llvm::erase_if(Slices, [](const Slice &S) { return S.isDead(); });
 
@@ -1463,9 +1445,6 @@ void AllocaSlices::print(raw_ostream &OS) const {
     return;
   }
 
-  if (PointerEscapingInstrReadOnly)
-    OS << "Escapes into ReadOnly: " << *PointerEscapingInstrReadOnly << "\n";
-
   OS << "Slices of alloca: " << AI << "\n";
   for (const_iterator I = begin(), E = end(); I != E; ++I)
     print(OS, I);
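For context on the check the removed `visitCallBase` override performed: `CallBase` exposes per-operand attribute queries, so a pass can tell whether a call that receives the alloca's address may capture the pointer or write through it. Below is a minimal out-of-tree sketch of that classification using the same `CallBase` queries the removed code used; the helper and enum names are illustrative only, not part of SROA or this change.

```cpp
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Use.h"
#include "llvm/Support/Casting.h"

using namespace llvm;

// Classify one use of an alloca's pointer that feeds a call.
enum class PtrEscapeKind { NotACall, ReadOnlyEscape, FullEscape };

static PtrEscapeKind classifyCallUse(const Use &U) {
  const auto *CB = dyn_cast<CallBase>(U.getUser());
  if (!CB)
    return PtrEscapeKind::NotACall;

  unsigned ArgNo = U.getOperandNo();
  // nocapture + readonly on this operand: the callee may read through the
  // pointer for the duration of the call, but cannot retain it or write
  // through it.
  if (CB->doesNotCapture(ArgNo) && CB->onlyReadsMemory(ArgNo))
    return PtrEscapeKind::ReadOnlyEscape;
  return PtrEscapeKind::FullEscape;
}
```

An argument that is both `nocapture` and `readonly` can only be read during the call, which is why the removed code recorded such calls as a read-only escape instead of aborting slice building outright.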
@@ -5475,86 +5454,6 @@ void SROA::clobberUse(Use &U) {
   }
 }
 
-/// A basic LoadAndStorePromoter that does not remove store nodes.
-class BasicLoadAndStorePromoter : public LoadAndStorePromoter {
-public:
-  BasicLoadAndStorePromoter(ArrayRef<const Instruction *> Insts, SSAUpdater &S,
-                            Type *ZeroType)
-      : LoadAndStorePromoter(Insts, S), ZeroType(ZeroType) {}
-  bool shouldDelete(Instruction *I) const override {
-    return !isa<StoreInst>(I) && !isa<AllocaInst>(I);
-  }
-
-  Value *getValueToUseForAlloca(Instruction *I) const override {
-    return UndefValue::get(ZeroType);
-  }
-
-private:
-  Type *ZeroType;
-};
-
-bool SROA::propagateStoredValuesToLoads(AllocaInst &AI, AllocaSlices &AS) {
-  // Look through each "partition", looking for slices with the same start/end
-  // that do not overlap with any before them. The slices are sorted by
-  // increasing beginOffset. We don't use AS.partitions(), as it will use a more
-  // sophisticated algorithm that takes splittable slices into account.
-  auto PartitionBegin = AS.begin();
-  auto PartitionEnd = PartitionBegin;
-  uint64_t BeginOffset = PartitionBegin->beginOffset();
-  uint64_t EndOffset = PartitionBegin->endOffset();
-  while (PartitionBegin != AS.end()) {
-    bool AllSameAndValid = true;
-    SmallVector<Instruction *> Insts;
-    Type *PartitionType = nullptr;
-    while (PartitionEnd != AS.end() &&
-           (PartitionEnd->beginOffset() < EndOffset ||
-            PartitionEnd->endOffset() <= EndOffset)) {
-      if (AllSameAndValid) {
-        AllSameAndValid &= PartitionEnd->beginOffset() == BeginOffset &&
-                           PartitionEnd->endOffset() == EndOffset;
-        Instruction *User =
-            cast<Instruction>(PartitionEnd->getUse()->getUser());
-        if (auto *LI = dyn_cast<LoadInst>(User)) {
-          Type *UserTy = LI->getType();
-          // LoadAndStorePromoter requires all the types to be the same.
-          if (!LI->isSimple() || (PartitionType && UserTy != PartitionType))
-            AllSameAndValid = false;
-          PartitionType = UserTy;
-          Insts.push_back(User);
-        } else if (auto *SI = dyn_cast<StoreInst>(User)) {
-          Type *UserTy = SI->getValueOperand()->getType();
-          if (!SI->isSimple() || (PartitionType && UserTy != PartitionType))
-            AllSameAndValid = false;
-          PartitionType = UserTy;
-          Insts.push_back(User);
-        } else if (!isAssumeLikeIntrinsic(User)) {
-          AllSameAndValid = false;
-        }
-      }
-      EndOffset = std::max(EndOffset, PartitionEnd->endOffset());
-      ++PartitionEnd;
-    }
-
-    // So long as all the slices start and end offsets matched, update loads to
-    // the values stored in the partition.
-    if (AllSameAndValid && !Insts.empty()) {
-      LLVM_DEBUG(dbgs() << "Propagate values on slice [" << BeginOffset << ", "
-                        << EndOffset << ")\n");
-      SmallVector<PHINode *, 4> NewPHIs;
-      SSAUpdater SSA(&NewPHIs);
-      Insts.push_back(&AI);
-      BasicLoadAndStorePromoter Promoter(Insts, SSA, PartitionType);
-      Promoter.run(Insts);
-    }
-
-    // Step on to the next partition.
-    PartitionBegin = PartitionEnd;
-    BeginOffset = PartitionBegin->beginOffset();
-    EndOffset = PartitionBegin->endOffset();
-  }
-  return true;
-}
-
 /// Analyze an alloca for SROA.
 ///
 /// This analyzes the alloca to ensure we can reason about it, builds
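The deleted `propagateStoredValuesToLoads` was built on the generic `LoadAndStorePromoter`/`SSAUpdater` utilities, which is why the `llvm/Transforms/Utils/SSAUpdater.h` include goes away above. A minimal sketch of that idiom follows, assuming the caller has already collected the simple loads and stores of a single slice that share one access type; `KeepStoresPromoter` and `rewriteSliceAccesses` are illustrative names, not part of this change.

```cpp
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/Instructions.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"

using namespace llvm;

namespace {
// Rewrite loads to the values reaching them from stores, but keep the stores:
// the alloca still escapes (read-only), so its memory must stay up to date.
class KeepStoresPromoter : public LoadAndStorePromoter {
public:
  KeepStoresPromoter(ArrayRef<const Instruction *> Insts, SSAUpdater &S)
      : LoadAndStorePromoter(Insts, S) {}

  // Delete only the loads once they have been replaced by SSA values.
  bool shouldDelete(Instruction *I) const override { return isa<LoadInst>(I); }
};
} // namespace

// Insts holds the simple loads and stores of one alloca slice, all with a
// common access type.
static void rewriteSliceAccesses(SmallVectorImpl<Instruction *> &Insts) {
  SmallVector<PHINode *, 4> NewPHIs;
  SSAUpdater SSA(&NewPHIs); // collects any PHIs the rewrite introduces
  KeepStoresPromoter Promoter(Insts, SSA);
  Promoter.run(Insts);
}
```

The difference from full mem2reg-style promotion is `shouldDelete`: the stores stay because non-capturing callees may still read the memory, and only the loads that have been rewritten to SSA values are removed.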
@@ -5595,11 +5494,6 @@ SROA::runOnAlloca(AllocaInst &AI) {
   if (AS.isEscaped())
     return {Changed, CFGChanged};
 
-  if (AS.isEscapedReadOnly()) {
-    Changed |= propagateStoredValuesToLoads(AI, AS);
-    return {Changed, CFGChanged};
-  }
-
   // Delete all the dead users of this alloca before splitting and rewriting it.
   for (Instruction *DeadUser : AS.getDeadUsers()) {
     // Free up everything used by this instruction.