1 //===- AssumeBundleBuilder.cpp - tools to preserve informations -*- C++ -*-===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
9 #define DEBUG_TYPE "assume-builder"
11 #include "llvm/Transforms/Utils/AssumeBundleBuilder.h"
12 #include "llvm/ADT/DepthFirstIterator.h"
13 #include "llvm/ADT/MapVector.h"
14 #include "llvm/ADT/Statistic.h"
15 #include "llvm/Analysis/AssumeBundleQueries.h"
16 #include "llvm/Analysis/AssumptionCache.h"
17 #include "llvm/Analysis/ValueTracking.h"
18 #include "llvm/IR/Dominators.h"
19 #include "llvm/IR/Function.h"
20 #include "llvm/IR/InstIterator.h"
21 #include "llvm/IR/IntrinsicInst.h"
22 #include "llvm/IR/Module.h"
23 #include "llvm/InitializePasses.h"
24 #include "llvm/Support/CommandLine.h"
25 #include "llvm/Support/DebugCounter.h"
26 #include "llvm/Transforms/Utils/Local.h"
// Hidden option: when set, every attribute kind is preserved in assume
// bundles, not only the kinds isUsefullToPreserve() considers valuable.
30 cl::opt<bool> ShouldPreserveAllAttributes(
31 "assume-preserve-all", cl::init(false), cl::Hidden,
32 cl::desc("enable preservation of all attributes, even those that are "
33 "unlikely to be useful"));
// Master switch for the knowledge-retention machinery: buildAssumeFromInst,
// salvageKnowledge and the simplify pass all early-out when this is off.
// NOTE(review): a continuation line (likely "cl::desc(") appears to be
// missing from this excerpt between the two lines below.
35 cl::opt<bool> EnableKnowledgeRetention(
36 "enable-knowledge-retention", cl::init(false), cl::Hidden,
38 "enable preservation of attributes throughout code transformation"));
// Statistics reported by the assume builder and the assume-simplify pass.
40 STATISTIC(NumAssumeBuilt, "Number of assume built by the assume builder");
41 STATISTIC(NumBundlesInAssumes, "Total number of Bundles in the assume built");
42 STATISTIC(NumAssumesMerged,
43 "Number of assume merged by the assume simplify pass");
44 STATISTIC(NumAssumesRemoved,
45 "Number of assume removed by the assume simplify pass");
// Debug counter allowing individual assume creations to be skipped when
// bisecting with -debug-counter.
47 DEBUG_COUNTER(BuildAssumeCounter, "assume-builder-counter",
48 "Controls which assumes gets created");
// Returns true for attribute kinds worth keeping alive in an llvm.assume
// bundle; -assume-preserve-all bypasses this filter (see addAttribute).
// NOTE(review): the switch header and the return statements are missing from
// this excerpt; only the preserved case labels are visible.
52 bool isUsefullToPreserve(Attribute::AttrKind Kind) {
54 case Attribute::NonNull:
55 case Attribute::Alignment:
56 case Attribute::Dereferenceable:
57 case Attribute::DereferenceableOrNull:
65 /// This function will try to transform the given knowledge into a more
66 /// canonical one. The canonical knowledge may be the given one.
// NOTE(review): several lines of this function (default case, the Offset
// declaration, closing braces and returns) are missing from this excerpt.
67 RetainedKnowledge canonicalizedKnowledge(RetainedKnowledge RK, Module *M) {
68 switch (RK.AttrKind) {
// Nonnull-ness is rebased onto the underlying object of the pointer.
71 case Attribute::NonNull:
72 RK.WasOn = GetUnderlyingObject(RK.WasOn, M->getDataLayout());
// Alignment can be rebased past in-bounds offsets as long as the GEP's
// maximum preserved alignment still covers the claimed alignment.
74 case Attribute::Alignment: {
75 Value *V = RK.WasOn->stripInBoundsOffsets([&](const Value *Strip) {
76 if (auto *GEP = dyn_cast<GEPOperator>(Strip))
79 GEP->getMaxPreservedAlignment(M->getDataLayout()).value());
// Dereferenceability is rebased onto the pointer base; the byte count is
// grown by the constant offset that was stripped (visible below).
84 case Attribute::Dereferenceable:
85 case Attribute::DereferenceableOrNull: {
87 Value *V = GetPointerBaseWithConstantOffset(
88 RK.WasOn, Offset, M->getDataLayout(), /*AllowNonInBounds*/ false);
91 RK.ArgValue = RK.ArgValue + Offset;
98 /// This class contains all knowledge that has been gathered while building an
99 /// llvm.assume and the functions to manipulate it.
100 struct AssumeBuilderState {
// Maps (value, attribute kind) to the strongest argument value seen so far.
103 using MapKey = std::pair<Value *, Attribute::AttrKind>;
104 SmallMapVector<MapKey, unsigned, 8> AssumedKnowledgeMap;
// Instruction whose knowledge is being salvaged; null when building an
// assume without a removal context (see buildAssumeFromInst).
105 Instruction *InstBeingRemoved = nullptr;
// Optional analyses used to find/update pre-existing assumes.
106 AssumptionCache* AC = nullptr;
107 DominatorTree* DT = nullptr;
// NOTE(review): the `Module *M` member declaration is not visible in this
// excerpt, but it is initialized below and used throughout the class.
109 AssumeBuilderState(Module *M, Instruction *I = nullptr,
110 AssumptionCache *AC = nullptr, DominatorTree *DT = nullptr)
111 : M(M), InstBeingRemoved(I), AC(AC), DT(DT) {}
/// Try to prove that RK is already available at InstBeingRemoved through an
/// existing assume known to the AssumptionCache; when an existing assume is
/// weaker but the knowledge also holds at its position, its argument operand
/// is raised in place instead. Returns true if no new assume is needed.
113 bool tryToPreserveWithoutAddingAssume(RetainedKnowledge RK) {
// Without a context instruction or a subject value nothing can be matched.
114 if (!InstBeingRemoved || !RK.WasOn)
116 bool HasBeenPreserved = false;
117 Use* ToUpdate = nullptr;
118 getKnowledgeForValue(
119 RK.WasOn, {RK.AttrKind}, AC,
120 [&](RetainedKnowledge RKOther, Instruction *Assume,
121 const CallInst::BundleOpInfo *Bundle) {
// The candidate assume must be valid at the instruction being removed.
122 if (!isValidAssumeForContext(Assume, InstBeingRemoved, DT))
// An equal-or-stronger assume already covers RK.
124 if (RKOther.ArgValue >= RK.ArgValue) {
125 HasBeenPreserved = true;
// Otherwise, if RK also holds at the assume's position, record the
// weaker assume's argument operand so it can be raised to RK's value.
127 } else if (isValidAssumeForContext(InstBeingRemoved, Assume,
129 HasBeenPreserved = true;
130 IntrinsicInst *Intr = cast<IntrinsicInst>(Assume);
131 ToUpdate = &Intr->op_begin()[Bundle->Begin + ABA_Argument];
// NOTE(review): the lines applying ToUpdate (132-137) are not visible in
// this excerpt; the constant below is the replacement argument value.
138 ConstantInt::get(Type::getInt64Ty(M->getContext()), RK.ArgValue));
139 return HasBeenPreserved;
/// Filter out knowledge that is not worth spending an assume operand bundle
/// on. NOTE(review): the return statements between the visible conditions
/// are missing from this excerpt.
142 bool isKnowledgeWorthPreserving(RetainedKnowledge RK) {
147 if (RK.WasOn->getType()->isPointerTy()) {
148 Value *UnderlyingPtr = GetUnderlyingObject(RK.WasOn, M->getDataLayout());
// Pointers based on an alloca or a global — presumably cheap to
// re-derive, so the (not-visible) early-out skips them.
149 if (isa<AllocaInst>(UnderlyingPtr) || isa<GlobalValue>(UnderlyingPtr))
// An argument attribute that is at least as strong already encodes RK.
152 if (auto *Arg = dyn_cast<Argument>(RK.WasOn)) {
153 if (Arg->hasAttribute(RK.AttrKind) &&
154 (!Attribute::doesAttrKindHaveArgument(RK.AttrKind) ||
155 Arg->getAttribute(RK.AttrKind).getValueAsInt() >= RK.ArgValue))
// Knowledge on a value that dies with the instruction being removed
// (already unused, or solely used by it) cannot be consumed later.
159 if (auto *Inst = dyn_cast<Instruction>(RK.WasOn))
160 if (wouldInstructionBeTriviallyDead(Inst)) {
161 if (RK.WasOn->use_empty())
163 Use *SingleUse = RK.WasOn->getSingleUndroppableUse();
164 if (SingleUse && SingleUse->getUser() == InstBeingRemoved)
/// Record RK for emission by the next build(): canonicalize it, drop it when
/// it is not worth preserving or already covered by an existing assume, and
/// otherwise merge it into AssumedKnowledgeMap keeping the strongest value.
170 void addKnowledge(RetainedKnowledge RK) {
171 RK = canonicalizedKnowledge(RK, M);
173 if (!isKnowledgeWorthPreserving(RK))
176 if (tryToPreserveWithoutAddingAssume(RK))
178 MapKey Key{RK.WasOn, RK.AttrKind};
179 auto Lookup = AssumedKnowledgeMap.find(Key);
// First sighting of this (value, kind) pair: just record it.
180 if (Lookup == AssumedKnowledgeMap.end()) {
181 AssumedKnowledgeMap[Key] = RK.ArgValue;
// A zero argument must stay zero: it marks attributes without a value.
184 assert(((Lookup->second == 0 && RK.ArgValue == 0) ||
185 (Lookup->second != 0 && RK.ArgValue != 0)) &&
186 "inconsistent argument value");
188 /// This is only desirable because for all attributes taking an argument
189 /// higher is better.
190 Lookup->second = std::max(Lookup->second, RK.ArgValue);
/// Convert an IR attribute into retained knowledge, skipping type/string
/// attributes and (unless -assume-preserve-all) kinds deemed not useful.
193 void addAttribute(Attribute Attr, Value *WasOn) {
194 if (Attr.isTypeAttribute() || Attr.isStringAttribute() ||
195 (!ShouldPreserveAllAttributes &&
196 !isUsefullToPreserve(Attr.getKindAsEnum())))
// Integer attributes (align, dereferenceable, ...) carry their value.
198 unsigned AttrArg = 0;
199 if (Attr.isIntAttribute())
200 AttrArg = Attr.getValueAsInt();
201 addKnowledge({Attr.getKindAsEnum(), AttrArg, WasOn});
/// Harvest knowledge from a call: parameter attributes (attached to the
/// corresponding argument operands) and function attributes, from both the
/// call site and, when the callee is known, its declaration.
204 void addCall(const CallBase *Call) {
205 auto addAttrList = [&](AttributeList AttrList) {
// Attribute set Idx describes argument operand Idx - 1.
206 for (unsigned Idx = AttributeList::FirstArgIndex;
207 Idx < AttrList.getNumAttrSets(); Idx++)
208 for (Attribute Attr : AttrList.getAttributes(Idx))
209 addAttribute(Attr, Call->getArgOperand(Idx - 1));
// Function-level attributes are not tied to any particular value.
210 for (Attribute Attr : AttrList.getFnAttributes())
211 addAttribute(Attr, nullptr);
213 addAttrList(Call->getAttributes());
214 if (Function *Fn = Call->getCalledFunction())
215 addAttrList(Fn->getAttributes());
/// Materialize the accumulated knowledge as a single `llvm.assume(true)`
/// call carrying one operand bundle per (value, attribute) pair; returns
/// null when there is nothing to preserve (or the debug counter says skip).
/// The returned call is not inserted into any block.
218 IntrinsicInst *build() {
219 if (AssumedKnowledgeMap.empty())
221 if (!DebugCounter::shouldExecute(BuildAssumeCounter))
223 Function *FnAssume = Intrinsic::getDeclaration(M, Intrinsic::assume);
224 LLVMContext &C = M->getContext();
225 SmallVector<OperandBundleDef, 8> OpBundle;
226 for (auto &MapElem : AssumedKnowledgeMap) {
227 SmallVector<Value *, 2> Args;
// First bundle operand is the value the attribute applies to, if any.
228 if (MapElem.first.first)
229 Args.push_back(MapElem.first.first);
231 /// This is only valid because for all attributes that currently exist a
232 /// value of 0 is useless and should not be preserved.
234 Args.push_back(ConstantInt::get(Type::getInt64Ty(M->getContext()),
// The bundle tag is the attribute's name (e.g. "align", "nonnull").
236 OpBundle.push_back(OperandBundleDefT<Value *>(
237 std::string(Attribute::getNameFromAttrKind(MapElem.first.second)),
239 NumBundlesInAssumes++;
242 return cast<IntrinsicInst>(CallInst::Create(
243 FnAssume, ArrayRef<Value *>({ConstantInt::getTrue(C)}), OpBundle));
/// Record what a memory access of AccType through Pointer implies:
/// dereferenceable(store-size), nonnull (when null is not defined in the
/// pointer's address space), and the access alignment `MA` (whose parameter
/// declaration line is not visible in this excerpt).
246 void addAccessedPtr(Instruction *MemInst, Value *Pointer, Type *AccType,
248 unsigned DerefSize = MemInst->getModule()
250 .getTypeStoreSize(AccType)
252 if (DerefSize != 0) {
253 addKnowledge({Attribute::Dereferenceable, DerefSize, Pointer});
// Accessing null is only UB where null pointers are not "defined".
254 if (!NullPointerIsDefined(MemInst->getFunction(),
255 Pointer->getType()->getPointerAddressSpace()))
256 addKnowledge({Attribute::NonNull, 0u, Pointer});
// align(1) is trivially true; only record meaningful alignments.
258 if (MA.valueOrOne() > 1)
260 {Attribute::Alignment, unsigned(MA.valueOrOne().value()), Pointer});
/// Dispatch on the instruction kind and collect whatever knowledge it
/// implies. Calls, loads and stores are currently handled.
263 void addInstruction(Instruction *I) {
264 if (auto *Call = dyn_cast<CallBase>(I))
265 return addCall(Call);
266 if (auto *Load = dyn_cast<LoadInst>(I))
267 return addAccessedPtr(I, Load->getPointerOperand(), Load->getType(),
269 if (auto *Store = dyn_cast<StoreInst>(I))
270 return addAccessedPtr(I, Store->getPointerOperand(),
271 Store->getValueOperand()->getType(),
273 // TODO: Add support for the other Instructions.
274 // TODO: Maybe we should look around and merge with other llvm.assume.
// Build (but do not insert) an llvm.assume capturing the knowledge implied
// by I; a no-op (null, per the not-visible early return) when
// -enable-knowledge-retention is off or nothing is worth keeping.
280 IntrinsicInst *llvm::buildAssumeFromInst(Instruction *I) {
281 if (!EnableKnowledgeRetention)
283 AssumeBuilderState Builder(I->getModule());
284 Builder.addInstruction(I);
285 return Builder.build();
// Preserve the knowledge implied by I (typically because I is about to be
// removed) by inserting an llvm.assume right before it. Nothing is salvaged
// for terminators or when -enable-knowledge-retention is off.
288 void llvm::salvageKnowledge(Instruction *I, AssumptionCache *AC,
290 if (!EnableKnowledgeRetention || I->isTerminator())
// Pass I as the removal context so the builder can skip (or strengthen)
// knowledge already covered by existing assumes.
292 AssumeBuilderState Builder(I->getModule(), I, AC, DT);
293 Builder.addInstruction(I);
294 if (IntrinsicInst *Intr = Builder.build()) {
295 Intr->insertBefore(I);
// Keep the cache coherent so later queries can find the new assume.
// NOTE(review): the null-check guarding AC (line 296) is not visible here.
297 AC->registerAssumption(Intr);
/// Driver for the assume-simplify transformation: drops redundant knowledge,
/// merges neighboring assumes and deletes the ones left empty.
/// NOTE(review): the F/AC/DT/C member declarations (lines 304-307) are not
/// visible in this excerpt.
303 struct AssumeSimplify {
// Assumes that may have become empty; revisited by RunCleanup.
308 SmallDenseSet<IntrinsicInst *> CleanupToDo;
// Interned tag marking bundles that should simply be dropped.
309 StringMapEntry<uint32_t> *IgnoreTag;
// Per-block list of assumes, kept in program order by buildMapping.
310 SmallDenseMap<BasicBlock *, SmallVector<IntrinsicInst *, 4>, 8> BBToAssume;
311 bool MadeChange = false;
313 AssumeSimplify(Function &F, AssumptionCache &AC, DominatorTree *DT,
315 : F(F), AC(AC), DT(DT), C(C),
316 IgnoreTag(C.getOrInsertBundleTag(IgnoreBundleTag)) {}
/// (Re)build BBToAssume from the AssumptionCache. When FilterBooleanArgument
/// is set, assumes whose condition is not a known-true constant are skipped
/// (the skip itself is on a line not visible in this excerpt).
318 void buildMapping(bool FilterBooleanArgument) {
320 for (Value *V : AC.assumptions()) {
323 IntrinsicInst *Assume = cast<IntrinsicInst>(V);
324 if (FilterBooleanArgument) {
325 auto *Arg = dyn_cast<ConstantInt>(Assume->getOperand(0));
326 if (!Arg || Arg->isZero())
329 BBToAssume[Assume->getParent()].push_back(Assume);
// Keep each block's assumes in program order for the merging logic.
332 for (auto &Elem : BBToAssume) {
333 llvm::sort(Elem.second,
334 [](const IntrinsicInst *LHS, const IntrinsicInst *RHS) {
335 return LHS->comesBefore(RHS);
340 /// Remove all assumes in CleanupToDo if their boolean argument is true and
341 /// ForceCleanup is set or the assume doesn't hold valuable knowledge.
342 void RunCleanup(bool ForceCleanup) {
343 for (IntrinsicInst *Assume : CleanupToDo) {
// Never delete an assume whose condition might be doing real work.
344 auto *Arg = dyn_cast<ConstantInt>(Assume->getOperand(0));
345 if (!Arg || Arg->isZero() ||
346 (!ForceCleanup && !isAssumeWithEmptyBundle(*Assume)))
353 Assume->eraseFromParent();
358 /// Remove knowledge stored in assumes when it is already known by an
359 /// attribute or another assume. When valid, this can instead update the
360 /// existing knowledge in an attribute or another assume.
361 void dropRedundantKnowledge() {
// A previously seen bundle: the assume holding it and its argument value.
363 IntrinsicInst *Assume;
365 CallInst::BundleOpInfo *BOI;
// Best knowledge seen so far for each (value, attribute-kind) pair.
368 SmallDenseMap<std::pair<Value *, Attribute::AttrKind>,
369 SmallVector<MapValue, 2>, 16>
// Walk blocks depth-first so dominating assumes tend to be seen first.
371 for (BasicBlock *BB : depth_first(&F))
372 for (Value *V : BBToAssume[BB]) {
375 IntrinsicInst *Assume = cast<IntrinsicInst>(V);
376 for (CallInst::BundleOpInfo &BOI : Assume->bundle_op_infos()) {
// Neutralize this bundle by undef-ing its WasOn operand and queue the
// assume for cleanup in case it ends up empty.
377 auto RemoveFromAssume = [&]() {
378 CleanupToDo.insert(Assume);
379 if (BOI.Begin != BOI.End) {
380 Use *U = &Assume->op_begin()[BOI.Begin + ABA_WasOn];
381 U->set(UndefValue::get(U->get()->getType()));
// Bundles explicitly tagged "ignore" are simply dropped.
385 if (BOI.Tag == IgnoreTag) {
386 CleanupToDo.insert(Assume);
389 RetainedKnowledge RK = getKnowledgeFromBundle(*Assume, BOI);
// Knowledge about an argument is subsumed by (or can be folded into) a
// parameter attribute when the assume holds at the function entry's
// first insertion point.
390 if (auto *Arg = dyn_cast_or_null<Argument>(RK.WasOn)) {
391 bool HasSameKindAttr = Arg->hasAttribute(RK.AttrKind);
393 if (!Attribute::doesAttrKindHaveArgument(RK.AttrKind) ||
394 Arg->getAttribute(RK.AttrKind).getValueAsInt() >=
399 if (isValidAssumeForContext(
400 Assume, &*F.getEntryBlock().getFirstInsertionPt()) ||
401 Assume == &*F.getEntryBlock().getFirstInsertionPt()) {
403 Arg->removeAttr(RK.AttrKind);
404 Arg->addAttr(Attribute::get(C, RK.AttrKind, RK.ArgValue));
// Compare against previously recorded assumes for the same pair: a
// valid stronger one makes RK redundant; a weaker one reachable from
// here gets its argument raised to RK's value instead.
410 auto &Lookup = Knowledge[{RK.WasOn, RK.AttrKind}];
411 for (MapValue &Elem : Lookup) {
412 if (!isValidAssumeForContext(Elem.Assume, Assume, DT))
414 if (Elem.ArgValue >= RK.ArgValue) {
417 } else if (isValidAssumeForContext(Assume, Elem.Assume, DT)) {
418 Elem.Assume->op_begin()[Elem.BOI->Begin + ABA_Argument].set(
419 ConstantInt::get(Type::getInt64Ty(C), RK.ArgValue));
// Nothing matched: remember this bundle for later comparisons.
425 Lookup.push_back({Assume, RK.ArgValue, &BOI});
430 using MergeIterator = SmallVectorImpl<IntrinsicInst *>::iterator;
432 /// Merge all assumes from Begin to End into one and insert the resulting
433 /// assume as high as possible in the basic block.
434 void mergeRange(BasicBlock *BB, MergeIterator Begin, MergeIterator End) {
// Empty and single-element ranges need no merging.
435 if (Begin == End || std::next(Begin) == End)
437 /// Provide no additional information so that AssumeBuilderState doesn't
438 /// try to do any punning since it already has been done better.
439 AssumeBuilderState Builder(F.getParent());
441 /// For now it is initialized to the best value it could have
442 Instruction *InsertPt = BB->getFirstNonPHI();
// A landing pad must stay the first non-PHI instruction of its block.
443 if (isa<LandingPadInst>(InsertPt))
444 InsertPt = InsertPt->getNextNode();
// Accumulate the knowledge of every assume in the range, pushing the
// insertion point below any value the merged assume will refer to.
445 for (IntrinsicInst *I : make_range(Begin, End)) {
446 CleanupToDo.insert(I);
447 for (CallInst::BundleOpInfo &BOI : I->bundle_op_infos()) {
448 RetainedKnowledge RK = getKnowledgeFromBundle(*I, BOI);
451 Builder.addKnowledge(RK);
452 if (auto *I = dyn_cast_or_null<Instruction>(RK.WasOn))
453 if (I->getParent() == InsertPt->getParent() &&
454 (InsertPt->comesBefore(I) || InsertPt == I))
455 InsertPt = I->getNextNode();
459 /// Adjust InsertPt if it is before Begin, since mergeAssumes only
460 /// guarantees we can place the resulting assume between Begin and End.
461 if (InsertPt->comesBefore(*Begin))
462 for (auto It = (*Begin)->getIterator(), E = InsertPt->getIterator();
464 if (!isGuaranteedToTransferExecutionToSuccessor(&*It)) {
465 InsertPt = It->getNextNode();
468 IntrinsicInst *MergedAssume = Builder.build();
// Insert and register the merged assume so the cache stays coherent.
472 MergedAssume->insertBefore(InsertPt);
473 AC.registerAssumption(MergedAssume);
476 /// Merge assumes when they are in the same BasicBlock and, for all
477 /// instructions between them, isGuaranteedToTransferExecutionToSuccessor holds.
478 void mergeAssumes() {
// Iterators into AssumesInBB delimiting maximal mergeable sub-ranges.
481 SmallVector<MergeIterator, 4> SplitPoints;
482 for (auto &Elem : BBToAssume) {
483 SmallVectorImpl<IntrinsicInst *> &AssumesInBB = Elem.second;
484 if (AssumesInBB.size() < 2)
486 /// AssumesInBB is already sorted by order in the block.
// Scan the instructions spanning the first to the last assume; any
// instruction that may not transfer execution starts a new sub-range.
488 BasicBlock::iterator It = AssumesInBB.front()->getIterator();
489 BasicBlock::iterator E = AssumesInBB.back()->getIterator();
490 SplitPoints.push_back(AssumesInBB.begin());
491 MergeIterator LastSplit = AssumesInBB.begin();
492 for (; It != E; ++It)
493 if (!isGuaranteedToTransferExecutionToSuccessor(&*It)) {
494 for (; (*LastSplit)->comesBefore(&*It); ++LastSplit)
496 if (SplitPoints.back() != LastSplit)
497 SplitPoints.push_back(LastSplit);
499 SplitPoints.push_back(AssumesInBB.end());
// Merge each delimited sub-range independently.
500 for (auto SplitIt = SplitPoints.begin();
501 SplitIt != std::prev(SplitPoints.end()); SplitIt++) {
502 mergeRange(Elem.first, *SplitIt, *(SplitIt + 1));
// Top-level driver: drop redundant knowledge, clean up empty assumes, merge
// the remaining assumes per block, then clean up what merging made obsolete.
// Returns whether any change was made.
509 bool simplifyAssumes(Function &F, AssumptionCache *AC, DominatorTree *DT) {
510 AssumeSimplify AS(F, *AC, DT, F.getContext());
512 /// Remove knowledge that is already known by a dominating other assume or an
514 AS.dropRedundantKnowledge();
516 /// Remove assumes that are empty.
517 AS.RunCleanup(false);
519 /// Merge assumes in the same basic block when possible.
522 /// Remove assumes that were merged.
524 return AS.MadeChange;
// New pass manager entry point; a no-op unless -enable-knowledge-retention.
// Always reports all analyses preserved. Note the dominator tree is only
// used when already cached; it is never computed here.
529 PreservedAnalyses AssumeSimplifyPass::run(Function &F,
530 FunctionAnalysisManager &AM) {
531 if (!EnableKnowledgeRetention)
532 return PreservedAnalyses::all();
533 simplifyAssumes(F, &AM.getResult<AssumptionAnalysis>(F),
534 AM.getCachedResult<DominatorTreeAnalysis>(F));
535 return PreservedAnalyses::all();
// Legacy pass manager wrapper around simplifyAssumes().
539 class AssumeSimplifyPassLegacyPass : public FunctionPass {
543 AssumeSimplifyPassLegacyPass() : FunctionPass(ID) {
544 initializeAssumeSimplifyPassLegacyPassPass(
545 *PassRegistry::getPassRegistry());
547 bool runOnFunction(Function &F) override {
548 if (skipFunction(F) || !EnableKnowledgeRetention)
550 AssumptionCache &AC =
551 getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);
// The dominator tree is optional: use it only if already computed.
552 DominatorTreeWrapperPass *DTWP =
553 getAnalysisIfAvailable<DominatorTreeWrapperPass>();
554 return simplifyAssumes(F, &AC, DTWP ? &DTWP->getDomTree() : nullptr);
557 void getAnalysisUsage(AnalysisUsage &AU) const override {
558 AU.addRequired<AssumptionCacheTracker>();
560 AU.setPreservesAll();
// Legacy pass registration for -assume-simplify.
565 char AssumeSimplifyPassLegacyPass::ID = 0;
567 INITIALIZE_PASS_BEGIN(AssumeSimplifyPassLegacyPass, "assume-simplify",
568 "Assume Simplify", false, false)
569 INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
570 INITIALIZE_PASS_END(AssumeSimplifyPassLegacyPass, "assume-simplify",
571 "Assume Simplify", false, false)
// Factory used by the legacy pass pipeline; the caller takes ownership.
573 FunctionPass *llvm::createAssumeSimplifyPass() {
574 return new AssumeSimplifyPassLegacyPass();
// New pass manager entry point: runs salvageKnowledge over every instruction
// of F, inserting llvm.assume calls capturing their implied knowledge.
577 PreservedAnalyses AssumeBuilderPass::run(Function &F,
578 FunctionAnalysisManager &AM) {
579 AssumptionCache *AC = &AM.getResult<AssumptionAnalysis>(F);
// Only a cached dominator tree is used; it is never computed here.
580 DominatorTree* DT = AM.getCachedResult<DominatorTreeAnalysis>(F);
581 for (Instruction &I : instructions(F))
582 salvageKnowledge(&I, AC, DT);
583 return PreservedAnalyses::all();
// Legacy pass manager wrapper running salvageKnowledge on every instruction.
587 class AssumeBuilderPassLegacyPass : public FunctionPass {
591 AssumeBuilderPassLegacyPass() : FunctionPass(ID) {
592 initializeAssumeBuilderPassLegacyPassPass(*PassRegistry::getPassRegistry());
594 bool runOnFunction(Function &F) override {
595 AssumptionCache &AC =
596 getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);
// The dominator tree is optional: use it only if already computed.
597 DominatorTreeWrapperPass *DTWP =
598 getAnalysisIfAvailable<DominatorTreeWrapperPass>();
599 for (Instruction &I : instructions(F))
600 salvageKnowledge(&I, &AC, DTWP ? &DTWP->getDomTree() : nullptr);
604 void getAnalysisUsage(AnalysisUsage &AU) const override {
605 AU.addRequired<AssumptionCacheTracker>();
607 AU.setPreservesAll();
// Legacy pass registration for -assume-builder.
612 char AssumeBuilderPassLegacyPass::ID = 0;
614 INITIALIZE_PASS_BEGIN(AssumeBuilderPassLegacyPass, "assume-builder",
615 "Assume Builder", false, false)
616 INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
617 INITIALIZE_PASS_END(AssumeBuilderPassLegacyPass, "assume-builder",
618 "Assume Builder", false, false)