//===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements whole program optimization of virtual calls in cases
// where we know (via !type metadata) that the list of callees is fixed. This
// includes the following:
// - Single implementation devirtualization: if a virtual call has a single
//   possible callee, replace all calls with a direct call to that callee.
// - Virtual constant propagation: if the virtual function's return type is an
//   integer <=64 bits and all possible callees are readnone, for each class and
//   each list of constant arguments: evaluate the function, store the return
//   value alongside the virtual table, and rewrite each virtual call as a load
//   from the virtual table.
// - Uniform return value optimization: if the conditions for virtual constant
//   propagation hold and each function returns the same constant value, replace
//   each virtual call with that constant.
// - Unique return value optimization for i1 return values: if the conditions
//   for virtual constant propagation hold and a single vtable's function
//   returns 0, or a single vtable's function returns 1, replace each virtual
//   call with a comparison of the vptr against that vtable's address.
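//
// For example (an illustrative sketch, not taken from the LLVM test suite),
// given:
//
//   struct A { virtual int f(); };
//   struct B : A { int f() override { return 42; } };
//
// if B::f is the only implementation of A::f visible across the whole
// program, single implementation devirtualization rewrites each indirect
// call p->f() into a direct call to B::f. If B::f is also readnone, virtual
// constant propagation can evaluate it to 42, store that value next to B's
// vtable, and rewrite the call as a load; and since every implementation
// here returns 42, the uniform return value optimization folds the call to
// the constant outright.
//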
//===----------------------------------------------------------------------===//
#include "llvm/Transforms/IPO/WholeProgramDevirt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/PassRegistry.h"
#include "llvm/PassSupport.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/Utils/Evaluator.h"
#include <algorithm>
#include <cstdint>
#include <map>
#include <set>
#include <string>
#include <vector>
using namespace llvm;
using namespace wholeprogramdevirt;

#define DEBUG_TYPE "wholeprogramdevirt"
// Find the minimum offset that we may store a value of size Size bits at. If
// IsAfter is set, look for an offset after the object, otherwise look for an
// offset before the object.
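//
// For example (a worked sketch, not part of the original comments): if every
// target vtable has 16 bytes of virtual function pointers after the address
// point and no return values have been allocated yet, then for IsAfter ==
// true and Size == 1, MinByte is 16, every used region is empty, and the
// function returns (16 + 0) * 8 + 0 == 128, i.e. the first bit of the first
// free byte after the vtables.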
uint64_t
wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
                                     bool IsAfter, uint64_t Size) {
  // Find a minimum offset taking into account only vtable sizes.
  uint64_t MinByte = 0;
  for (const VirtualCallTarget &Target : Targets) {
    if (IsAfter)
      MinByte = std::max(MinByte, Target.minAfterBytes());
    else
      MinByte = std::max(MinByte, Target.minBeforeBytes());
  }

  // Build a vector of arrays of bytes covering, for each target, a slice of the
  // used region (see AccumBitVector::BytesUsed in
  // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte. Effectively,
  // this aligns the used regions to start at MinByte.
  //
  // In this example, A, B and C are vtables, # is a byte already allocated for
  // a virtual function pointer, AAAA... (etc.) are the used regions for the
  // vtables and Offset(X) is the value computed for the Offset variable below
  // for X.
  //
  // A: ################AAAAAAAA|AAAAAAAA
  // B: ########BBBBBBBBBBBBBBBB|BBBB
  // C: ########################|CCCCCCCCCCCCCCCC
  //
  // This code produces the slices of A, B and C that appear after the divider
  // at MinByte.
  std::vector<ArrayRef<uint8_t>> Used;
  for (const VirtualCallTarget &Target : Targets) {
    ArrayRef<uint8_t> VTUsed = IsAfter ? Target.TM->Bits->After.BytesUsed
                                       : Target.TM->Bits->Before.BytesUsed;
    uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
                              : MinByte - Target.minBeforeBytes();

    // Disregard used regions that are smaller than Offset. These are
    // effectively all-free regions that do not need to be checked.
    if (VTUsed.size() > Offset)
      Used.push_back(VTUsed.slice(Offset));
  }

  if (Size == 1) {
    // Find a free bit in each member of Used.
    for (unsigned I = 0;; ++I) {
      uint8_t BitsUsed = 0;
      for (auto &&B : Used)
        if (I < B.size())
          BitsUsed |= B[I];
      if (BitsUsed != 0xff)
        return (MinByte + I) * 8 +
               countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
    }
  } else {
    // Find a free (Size/8) byte region in each member of Used.
    // FIXME: see if alignment helps.
    for (unsigned I = 0;; ++I) {
      for (auto &&B : Used) {
        unsigned Byte = 0;
        while ((I + Byte) < B.size() && Byte < (Size / 8)) {
          if (B[I + Byte])
            goto NextI;
          ++Byte;
        }
      }
      return (MinByte + I) * 8;
    NextI:;
    }
  }
}
void wholeprogramdevirt::setBeforeReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = -(AllocBefore / 8 + 1);
  else
    OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
  OffsetBit = AllocBefore % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setBeforeBit(AllocBefore);
    else
      Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
  }
}
void wholeprogramdevirt::setAfterReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = AllocAfter / 8;
  else
    OffsetByte = (AllocAfter + 7) / 8;
  OffsetBit = AllocAfter % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setAfterBit(AllocAfter);
    else
      Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
  }
}
VirtualCallTarget::VirtualCallTarget(Function *Fn, const TypeMemberInfo *TM)
    : Fn(Fn), TM(TM),
      IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()),
      WasDevirt(false) {}
namespace {

// A slot in a set of virtual tables. The TypeID identifies the set of virtual
// tables, and the ByteOffset is the offset in bytes from the address point to
// the virtual function pointer.
struct VTableSlot {
  Metadata *TypeID;
  uint64_t ByteOffset;
};

} // end anonymous namespace
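
// For example (illustrative, assuming the Itanium C++ ABI on a 64-bit
// target): for 'struct A { virtual void f(); virtual void g(); };', the slot
// for A::g would be {!"_ZTS1A", 8}: the type identifier shared by all vtables
// compatible with A, plus the byte offset of g's entry from the address
// point.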
namespace llvm {

template <> struct DenseMapInfo<VTableSlot> {
  static VTableSlot getEmptyKey() {
    return {DenseMapInfo<Metadata *>::getEmptyKey(),
            DenseMapInfo<uint64_t>::getEmptyKey()};
  }
  static VTableSlot getTombstoneKey() {
    return {DenseMapInfo<Metadata *>::getTombstoneKey(),
            DenseMapInfo<uint64_t>::getTombstoneKey()};
  }
  static unsigned getHashValue(const VTableSlot &I) {
    return DenseMapInfo<Metadata *>::getHashValue(I.TypeID) ^
           DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
  }
  static bool isEqual(const VTableSlot &LHS, const VTableSlot &RHS) {
    return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;
  }
};

} // end namespace llvm
namespace {

// A virtual call site. VTable is the loaded virtual table pointer, and CS is
// the indirect virtual call.
struct VirtualCallSite {
  Value *VTable;
  CallSite CS;

  // If non-null, this field points to the associated unsafe use count stored in
  // the DevirtModule::NumUnsafeUsesForTypeTest map below. See the description
  // of that field for details.
  unsigned *NumUnsafeUses;

  void emitRemark(const Twine &OptName, const Twine &TargetName) {
    Function *F = CS.getCaller();
    emitOptimizationRemark(
        F->getContext(), DEBUG_TYPE, *F,
        CS.getInstruction()->getDebugLoc(),
        OptName + ": devirtualized a call to " + TargetName);
  }

  void replaceAndErase(const Twine &OptName, const Twine &TargetName,
                       bool RemarksEnabled, Value *New) {
    if (RemarksEnabled)
      emitRemark(OptName, TargetName);
    CS->replaceAllUsesWith(New);
    if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
      BranchInst::Create(II->getNormalDest(), CS.getInstruction());
      II->getUnwindDest()->removePredecessor(II->getParent());
    }
    CS->eraseFromParent();
    // This use is no longer unsafe.
    if (NumUnsafeUses)
      --*NumUnsafeUses;
  }
};
struct DevirtModule {
  Module &M;

  IntegerType *Int8Ty;
  PointerType *Int8PtrTy;
  IntegerType *Int32Ty;

  bool RemarksEnabled;

  MapVector<VTableSlot, std::vector<VirtualCallSite>> CallSlots;

  // This map keeps track of the number of "unsafe" uses of a loaded function
  // pointer. The key is the associated llvm.type.test intrinsic call generated
  // by this pass. An unsafe use is one that calls the loaded function pointer
  // directly. Every time we eliminate an unsafe use (for example, by
  // devirtualizing it or by applying virtual constant propagation), we
  // decrement the value stored in this map. If a value reaches zero, we can
  // eliminate the type check by RAUWing the associated llvm.type.test call with
  // true.
  std::map<CallInst *, unsigned> NumUnsafeUsesForTypeTest;

  DevirtModule(Module &M)
      : M(M), Int8Ty(Type::getInt8Ty(M.getContext())),
        Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
        Int32Ty(Type::getInt32Ty(M.getContext())),
        RemarksEnabled(areRemarksEnabled()) {}

  bool areRemarksEnabled();

  void scanTypeTestUsers(Function *TypeTestFunc, Function *AssumeFunc);
  void scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc);

  void buildTypeIdentifierMap(
      std::vector<VTableBits> &Bits,
      DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap);
  Constant *getPointerAtOffset(Constant *I, uint64_t Offset);
  bool
  tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
                            const std::set<TypeMemberInfo> &TypeMemberInfos,
                            uint64_t ByteOffset);
  bool trySingleImplDevirt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryEvaluateFunctionsWithArgs(
      MutableArrayRef<VirtualCallTarget> TargetsForSlot,
      ArrayRef<ConstantInt *> Args);
  bool tryUniformRetValOpt(IntegerType *RetType,
                           MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryUniqueRetValOpt(unsigned BitWidth,
                          MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                          MutableArrayRef<VirtualCallSite> CallSites);
  bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           ArrayRef<VirtualCallSite> CallSites);

  void rebuildGlobal(VTableBits &B);

  bool run();
};
struct WholeProgramDevirt : public ModulePass {
  static char ID;

  WholeProgramDevirt() : ModulePass(ID) {
    initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
  }

  bool runOnModule(Module &M) override {
    if (skipModule(M))
      return false;

    return DevirtModule(M).run();
  }
};

} // end anonymous namespace
INITIALIZE_PASS(WholeProgramDevirt, "wholeprogramdevirt",
                "Whole program devirtualization", false, false)
char WholeProgramDevirt::ID = 0;
ModulePass *llvm::createWholeProgramDevirtPass() {
  return new WholeProgramDevirt;
}
PreservedAnalyses WholeProgramDevirtPass::run(Module &M,
                                              ModuleAnalysisManager &) {
  if (!DevirtModule(M).run())
    return PreservedAnalyses::all();
  return PreservedAnalyses::none();
}
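
// An illustrative sketch of the metadata the map below is built from (the
// operand order matches what buildTypeIdentifierMap reads): a vtable global
// carries !type attachments of the form
//
//   @_ZTV1A = constant ... , !type !0
//   !0 = !{i64 16, !"_ZTS1A"}
//
// meaning that the address point of a type-"_ZTS1A" object lies at byte
// offset 16 of @_ZTV1A; such an attachment contributes one TypeMemberInfo
// entry to TypeIdMap[!"_ZTS1A"].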
void DevirtModule::buildTypeIdentifierMap(
    std::vector<VTableBits> &Bits,
    DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap) {
  DenseMap<GlobalVariable *, VTableBits *> GVToBits;
  Bits.reserve(M.getGlobalList().size());
  SmallVector<MDNode *, 2> Types;
  for (GlobalVariable &GV : M.globals()) {
    Types.clear();
    GV.getMetadata(LLVMContext::MD_type, Types);
    if (Types.empty())
      continue;

    VTableBits *&BitsPtr = GVToBits[&GV];
    if (!BitsPtr) {
      Bits.emplace_back();
      Bits.back().GV = &GV;
      Bits.back().ObjectSize =
          M.getDataLayout().getTypeAllocSize(GV.getInitializer()->getType());
      BitsPtr = &Bits.back();
    }

    for (MDNode *Type : Types) {
      auto TypeID = Type->getOperand(1).get();

      uint64_t Offset =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
              ->getZExtValue();

      TypeIdMap[TypeID].insert({BitsPtr, Offset});
    }
  }
}
Constant *DevirtModule::getPointerAtOffset(Constant *I, uint64_t Offset) {
  if (I->getType()->isPointerTy()) {
    if (Offset == 0)
      return I;
    return nullptr;
  }

  const DataLayout &DL = M.getDataLayout();

  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    const StructLayout *SL = DL.getStructLayout(C->getType());
    if (Offset >= SL->getSizeInBytes())
      return nullptr;

    unsigned Op = SL->getElementContainingOffset(Offset);
    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset - SL->getElementOffset(Op));
  }
  if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *VTableTy = C->getType();
    uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());

    unsigned Op = Offset / ElemSize;
    if (Op >= C->getNumOperands())
      return nullptr;

    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset % ElemSize);
  }
  return nullptr;
}
bool DevirtModule::tryFindVirtualCallTargets(
    std::vector<VirtualCallTarget> &TargetsForSlot,
    const std::set<TypeMemberInfo> &TypeMemberInfos, uint64_t ByteOffset) {
  for (const TypeMemberInfo &TM : TypeMemberInfos) {
    if (!TM.Bits->GV->isConstant())
      return false;

    Constant *Ptr = getPointerAtOffset(TM.Bits->GV->getInitializer(),
                                       TM.Offset + ByteOffset);
    if (!Ptr)
      return false;

    auto Fn = dyn_cast<Function>(Ptr->stripPointerCasts());
    if (!Fn)
      return false;

    // We can disregard __cxa_pure_virtual as a possible call target, as
    // calls to pure virtuals are UB.
    if (Fn->getName() == "__cxa_pure_virtual")
      continue;

    TargetsForSlot.push_back({Fn, &TM});
  }

  // Give up if we couldn't find any targets.
  return !TargetsForSlot.empty();
}
bool DevirtModule::trySingleImplDevirt(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // See if the program contains a single implementation of this virtual
  // function.
  Function *TheFn = TargetsForSlot[0].Fn;
  for (auto &&Target : TargetsForSlot)
    if (TheFn != Target.Fn)
      return false;

  if (RemarksEnabled)
    TargetsForSlot[0].WasDevirt = true;
  // If so, update each call site to call that implementation directly.
  for (auto &&VCallSite : CallSites) {
    if (RemarksEnabled)
      VCallSite.emitRemark("single-impl", TheFn->getName());
    VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
        TheFn, VCallSite.CS.getCalledValue()->getType()));
    // This use is no longer unsafe.
    if (VCallSite.NumUnsafeUses)
      --*VCallSite.NumUnsafeUses;
  }
  return true;
}
bool DevirtModule::tryEvaluateFunctionsWithArgs(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<ConstantInt *> Args) {
  // Evaluate each function and store the result in each target's RetVal
  // field.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (Target.Fn->arg_size() != Args.size() + 1)
      return false;
    for (unsigned I = 0; I != Args.size(); ++I)
      if (Target.Fn->getFunctionType()->getParamType(I + 1) !=
          Args[I]->getType())
        return false;

    Evaluator Eval(M.getDataLayout(), nullptr);
    SmallVector<Constant *, 2> EvalArgs;
    EvalArgs.push_back(
        Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
    EvalArgs.insert(EvalArgs.end(), Args.begin(), Args.end());
    Constant *RetVal;
    if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
        !isa<ConstantInt>(RetVal))
      return false;
    Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
  }
  return true;
}
bool DevirtModule::tryUniformRetValOpt(
    IntegerType *RetType, MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // Uniform return value optimization. If all functions return the same
  // constant, replace all calls with that constant.
  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
  for (const VirtualCallTarget &Target : TargetsForSlot)
    if (Target.RetVal != TheRetVal)
      return false;

  auto TheRetValConst = ConstantInt::get(RetType, TheRetVal);
  for (auto Call : CallSites)
    Call.replaceAndErase("uniform-ret-val", TargetsForSlot[0].Fn->getName(),
                         RemarksEnabled, TheRetValConst);
  if (RemarksEnabled)
    for (auto &&Target : TargetsForSlot)
      Target.WasDevirt = true;
  return true;
}
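
// A sketch of the unique return value optimization (illustrative, assuming a
// hypothetical 'bool A::isB()' slot): if, for a given argument list, the
// implementation in B's vtable is the only one that returns 1, then instead
// of loading and calling the function, each call site can simply compare its
// vtable pointer against the address point of B's vtable; the comparison
// result is exactly the call's return value.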
bool DevirtModule::tryUniqueRetValOpt(
    unsigned BitWidth, MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // IsOne controls whether we look for a 0 or a 1.
  auto tryUniqueRetValOptFor = [&](bool IsOne) {
    const TypeMemberInfo *UniqueMember = nullptr;
    for (const VirtualCallTarget &Target : TargetsForSlot) {
      if (Target.RetVal == (IsOne ? 1 : 0)) {
        if (UniqueMember)
          return false;
        UniqueMember = Target.TM;
      }
    }

    // We should have found a unique member or bailed out by now. We already
    // checked for a uniform return value in tryUniformRetValOpt.
    assert(UniqueMember);

    // Replace each call with the comparison.
    for (auto &&Call : CallSites) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *OneAddr = B.CreateBitCast(UniqueMember->Bits->GV, Int8PtrTy);
      OneAddr = B.CreateConstGEP1_64(OneAddr, UniqueMember->Offset);
      Value *Cmp = B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
                                Call.VTable, OneAddr);
      Call.replaceAndErase("unique-ret-val", TargetsForSlot[0].Fn->getName(),
                           RemarksEnabled, Cmp);
    }
    // Update devirtualization statistics for targets.
    if (RemarksEnabled)
      for (auto &&Target : TargetsForSlot)
        Target.WasDevirt = true;
    return true;
  };

  if (BitWidth == 1) {
    if (tryUniqueRetValOptFor(true))
      return true;
    if (tryUniqueRetValOptFor(false))
      return true;
  }
  return false;
}
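
// An illustrative sketch of virtual constant propagation (the layout details
// are simplified): given 'struct A { virtual int size(); };' where every
// implementation of size() is readnone and returns a known constant, the
// pass evaluates each implementation, stores its 32-bit result alongside the
// corresponding vtable (at a fixed offset from the address point), and
// rewrites
//
//   %result = call i32 %fptr(%A* %obj)
//
// into a load of that stored constant through the already-available vtable
// pointer.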
bool DevirtModule::tryVirtualConstProp(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<VirtualCallSite> CallSites) {
  // This only works if the function returns an integer.
  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
  if (!RetType)
    return false;
  unsigned BitWidth = RetType->getBitWidth();
  if (BitWidth > 64)
    return false;

  // Make sure that each function does not access memory, takes at least one
  // argument, does not use its first argument (which we assume is 'this'),
  // and has the same return type.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (!Target.Fn->doesNotAccessMemory() || Target.Fn->arg_empty() ||
        !Target.Fn->arg_begin()->use_empty() ||
        Target.Fn->getReturnType() != RetType)
      return false;
  }

  // Group call sites by the list of constant arguments they pass.
  // The comparator ensures deterministic ordering.
  struct ByAPIntValue {
    bool operator()(const std::vector<ConstantInt *> &A,
                    const std::vector<ConstantInt *> &B) const {
      return std::lexicographical_compare(
          A.begin(), A.end(), B.begin(), B.end(),
          [](ConstantInt *AI, ConstantInt *BI) {
            return AI->getValue().ult(BI->getValue());
          });
    }
  };
  std::map<std::vector<ConstantInt *>, std::vector<VirtualCallSite>,
           ByAPIntValue>
      VCallSitesByConstantArg;
  for (auto &&VCallSite : CallSites) {
    std::vector<ConstantInt *> Args;
    if (VCallSite.CS.getType() != RetType)
      continue;
    for (auto &&Arg :
         make_range(VCallSite.CS.arg_begin() + 1, VCallSite.CS.arg_end())) {
      if (!isa<ConstantInt>(Arg))
        break;
      Args.push_back(cast<ConstantInt>(&Arg));
    }
    if (Args.size() + 1 != VCallSite.CS.arg_size())
      continue;

    VCallSitesByConstantArg[Args].push_back(VCallSite);
  }

  for (auto &&CSByConstantArg : VCallSitesByConstantArg) {
    if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
      continue;

    if (tryUniformRetValOpt(RetType, TargetsForSlot, CSByConstantArg.second))
      continue;

    if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second))
      continue;

    // Find an allocation offset in bits in all vtables associated with the
    // type.
    uint64_t AllocBefore =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
    uint64_t AllocAfter =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);

    // Calculate the total amount of padding needed to store a value at both
    // ends of the object.
    uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
    for (auto &&Target : TargetsForSlot) {
      TotalPaddingBefore += std::max<int64_t>(
          (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
      TotalPaddingAfter += std::max<int64_t>(
          (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
    }

    // If the amount of padding is too large, give up.
    // FIXME: do something smarter here.
    if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
      continue;

    // Calculate the offset to the value as a (possibly negative) byte offset
    // and (if applicable) a bit offset, and store the values in the targets.
    int64_t OffsetByte;
    uint64_t OffsetBit;
    if (TotalPaddingBefore <= TotalPaddingAfter)
      setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
                            OffsetBit);
    else
      setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
                           OffsetBit);

    if (RemarksEnabled)
      for (auto &&Target : TargetsForSlot)
        Target.WasDevirt = true;

    // Rewrite each call to a load from OffsetByte/OffsetBit.
    for (auto Call : CSByConstantArg.second) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *Addr = B.CreateConstGEP1_64(Call.VTable, OffsetByte);
      if (BitWidth == 1) {
        Value *Bits = B.CreateLoad(Addr);
        Value *Bit = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
        Value *BitsAndBit = B.CreateAnd(Bits, Bit);
        auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
        Call.replaceAndErase("virtual-const-prop-1-bit",
                             TargetsForSlot[0].Fn->getName(),
                             RemarksEnabled, IsBitSet);
      } else {
        Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
        Value *Val = B.CreateLoad(RetType, ValAddr);
        Call.replaceAndErase("virtual-const-prop",
                             TargetsForSlot[0].Fn->getName(),
                             RemarksEnabled, Val);
      }
    }
  }
  return true;
}
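
// An illustrative sketch of the layout rebuildGlobal produces (names are
// hypothetical): a vtable @_ZTV1A that had constants allocated before and
// after it becomes
//
//   @anon = private constant { [8 x i8], <vtable type>, [8 x i8] }
//               { <before bytes>, <original initializer>, <after bytes> }
//   @_ZTV1A = alias <vtable type>, getelementptr(..., @anon, 0, 1)
//
// so existing references to the vtable keep working while the evaluated
// return values live at fixed offsets from it.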
void DevirtModule::rebuildGlobal(VTableBits &B) {
  if (B.Before.Bytes.empty() && B.After.Bytes.empty())
    return;

  // Align each byte array to pointer width.
  unsigned PointerSize = M.getDataLayout().getPointerSize();
  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
  B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));

  // Before was stored in reverse order; flip it now.
  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
    std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);

  // Build an anonymous global containing the before bytes, followed by the
  // original initializer, followed by the after bytes.
  auto NewInit = ConstantStruct::getAnon(
      {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
       B.GV->getInitializer(),
       ConstantDataArray::get(M.getContext(), B.After.Bytes)});
  auto NewGV =
      new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
                         GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
  NewGV->setSection(B.GV->getSection());
  NewGV->setComdat(B.GV->getComdat());

  // Copy the original vtable's metadata to the anonymous global, adjusting
  // offsets as required.
  NewGV->copyMetadata(B.GV, B.Before.Bytes.size());

  // Build an alias named after the original global, pointing at the second
  // element (the original initializer).
  auto Alias = GlobalAlias::create(
      B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
      ConstantExpr::getGetElementPtr(
          NewInit->getType(), NewGV,
          ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
                               ConstantInt::get(Int32Ty, 1)}),
      &M);
  Alias->setVisibility(B.GV->getVisibility());
  Alias->takeName(B.GV);

  B.GV->replaceAllUsesWith(Alias);
  B.GV->eraseFromParent();
}
bool DevirtModule::areRemarksEnabled() {
  const auto &FL = M.getFunctionList();
  if (FL.empty())
    return false;
  const Function &Fn = FL.front();
  auto DI = OptimizationRemark(DEBUG_TYPE, Fn, DebugLoc(), "");
  return DI.isEnabled();
}
void DevirtModule::scanTypeTestUsers(Function *TypeTestFunc,
                                     Function *AssumeFunc) {
  // Find all virtual calls via a virtual table pointer %p under an assumption
  // of the form llvm.assume(llvm.type.test(%p, %md)). This indicates that %p
  // points to a member of the type identifier %md. Group calls by (type ID,
  // offset) pair (effectively the identity of the virtual function) and store
  // to CallSlots.
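  //
  // An illustrative sketch of the IR pattern being matched:
  //
  //   %vtable = load i8*, i8** %vtableptr
  //   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
  //   call void @llvm.assume(i1 %p)
  //   ... indirect call through a function pointer loaded from %vtable ...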
  DenseSet<Value *> SeenPtrs;
  for (auto I = TypeTestFunc->use_begin(), E = TypeTestFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;

    // Search for virtual calls based on %p and add them to DevirtCalls.
    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<CallInst *, 1> Assumes;
    findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI);

    // If we found any, add them to CallSlots. Only do this if we haven't seen
    // the vtable pointer before, as it may have been CSE'd with pointers from
    // other call sites, and we don't want to process call sites multiple
    // times.
    if (!Assumes.empty()) {
      Metadata *TypeId =
          cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
      Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
      if (SeenPtrs.insert(Ptr).second) {
        for (DevirtCallSite Call : DevirtCalls) {
          CallSlots[{TypeId, Call.Offset}].push_back(
              {CI->getArgOperand(0), Call.CS, nullptr});
        }
      }
    }

    // We no longer need the assumes or the type test.
    for (auto Assume : Assumes)
      Assume->eraseFromParent();
    // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
    // may use the vtable argument later.
    if (CI->use_empty())
      CI->eraseFromParent();
  }
}
void DevirtModule::scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc) {
  Function *TypeTestFunc = Intrinsic::getDeclaration(&M, Intrinsic::type_test);

  for (auto I = TypeCheckedLoadFunc->use_begin(),
            E = TypeCheckedLoadFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;

    Value *Ptr = CI->getArgOperand(0);
    Value *Offset = CI->getArgOperand(1);
    Value *TypeIdValue = CI->getArgOperand(2);
    Metadata *TypeId = cast<MetadataAsValue>(TypeIdValue)->getMetadata();

    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<Instruction *, 1> LoadedPtrs;
    SmallVector<Instruction *, 1> Preds;
    bool HasNonCallUses = false;
    findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
                                               HasNonCallUses, CI);

    // Start by generating "pessimistic" code that explicitly loads the function
    // pointer from the vtable and performs the type check. If possible, we will
    // eliminate the load and the type check later.

    // If possible, only generate the load at the point where it is used.
    // This helps avoid unnecessary spills.
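    //
    // An illustrative sketch of the expansion: a use such as
    //
    //   %pair = call {i8*, i1} @llvm.type.checked.load(i8* %vtable,
    //                                                  i32 %offset,
    //                                                  metadata !"_ZTS1A")
    //
    // is rewritten into a plain load of the function pointer at %offset plus
    // a separate @llvm.type.test call, with the pair's extractvalue uses
    // redirected to those two values.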
    IRBuilder<> LoadB(
        (LoadedPtrs.size() == 1 && !HasNonCallUses) ? LoadedPtrs[0] : CI);
    Value *GEP = LoadB.CreateGEP(Int8Ty, Ptr, Offset);
    Value *GEPPtr = LoadB.CreateBitCast(GEP, PointerType::getUnqual(Int8PtrTy));
    Value *LoadedValue = LoadB.CreateLoad(Int8PtrTy, GEPPtr);

    for (Instruction *LoadedPtr : LoadedPtrs) {
      LoadedPtr->replaceAllUsesWith(LoadedValue);
      LoadedPtr->eraseFromParent();
    }

    // Likewise for the type test.
    IRBuilder<> CallB((Preds.size() == 1 && !HasNonCallUses) ? Preds[0] : CI);
    CallInst *TypeTestCall = CallB.CreateCall(TypeTestFunc, {Ptr, TypeIdValue});

    for (Instruction *Pred : Preds) {
      Pred->replaceAllUsesWith(TypeTestCall);
      Pred->eraseFromParent();
    }

    // We have already erased any extractvalue instructions that refer to the
    // intrinsic call, but the intrinsic may have other non-extractvalue uses
    // (although this is unlikely). In that case, explicitly build a pair and
    // RAUW it.
    if (!CI->use_empty()) {
      Value *Pair = UndefValue::get(CI->getType());
      IRBuilder<> B(CI);
      Pair = B.CreateInsertValue(Pair, LoadedValue, {0});
      Pair = B.CreateInsertValue(Pair, TypeTestCall, {1});
      CI->replaceAllUsesWith(Pair);
    }

    // The number of unsafe uses is initially the number of uses.
    auto &NumUnsafeUses = NumUnsafeUsesForTypeTest[TypeTestCall];
    NumUnsafeUses = DevirtCalls.size();

    // If the function pointer has a non-call user, we cannot eliminate the type
    // check, as one of those users may eventually call the pointer. Increment
    // the unsafe use count to make sure it cannot reach zero.
    if (HasNonCallUses)
      ++NumUnsafeUses;
    for (DevirtCallSite Call : DevirtCalls) {
      CallSlots[{TypeId, Call.Offset}].push_back(
          {Ptr, Call.CS, &NumUnsafeUses});
    }

    CI->eraseFromParent();
  }
}
bool DevirtModule::run() {
  Function *TypeTestFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::type_test));
  Function *TypeCheckedLoadFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::type_checked_load));
  Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));

  if ((!TypeTestFunc || TypeTestFunc->use_empty() || !AssumeFunc ||
       AssumeFunc->use_empty()) &&
      (!TypeCheckedLoadFunc || TypeCheckedLoadFunc->use_empty()))
    return false;

  if (TypeTestFunc && AssumeFunc)
    scanTypeTestUsers(TypeTestFunc, AssumeFunc);

  if (TypeCheckedLoadFunc)
    scanTypeCheckedLoadUsers(TypeCheckedLoadFunc);

  // Rebuild type metadata into a map for easy lookup.
  std::vector<VTableBits> Bits;
  DenseMap<Metadata *, std::set<TypeMemberInfo>> TypeIdMap;
  buildTypeIdentifierMap(Bits, TypeIdMap);
  if (TypeIdMap.empty())
    return true;

  // For each (type, offset) pair:
  bool DidVirtualConstProp = false;
  std::map<std::string, Function *> DevirtTargets;
  for (auto &S : CallSlots) {
    // Search each of the members of the type identifier for the virtual
    // function implementation at offset S.first.ByteOffset, and add to
    // TargetsForSlot.
    std::vector<VirtualCallTarget> TargetsForSlot;
    if (!tryFindVirtualCallTargets(TargetsForSlot, TypeIdMap[S.first.TypeID],
                                   S.first.ByteOffset))
      continue;

    if (!trySingleImplDevirt(TargetsForSlot, S.second) &&
        tryVirtualConstProp(TargetsForSlot, S.second))
      DidVirtualConstProp = true;

    // Collect functions devirtualized at least for one call site for stats.
    if (RemarksEnabled)
      for (const auto &T : TargetsForSlot)
        if (T.WasDevirt)
          DevirtTargets[T.Fn->getName()] = T.Fn;
  }

  if (RemarksEnabled) {
    // Generate remarks for each devirtualized function.
    for (const auto &DT : DevirtTargets) {
      Function *F = DT.second;
      DISubprogram *SP = F->getSubprogram();
      DebugLoc DL = SP ? DebugLoc::get(SP->getScopeLine(), 0, SP) : DebugLoc();
      emitOptimizationRemark(F->getContext(), DEBUG_TYPE, *F, DL,
                             Twine("devirtualized ") + F->getName());
    }
  }

  // If we were able to eliminate all unsafe uses for a type checked load,
  // eliminate the type test by replacing it with true.
  if (TypeCheckedLoadFunc) {
    auto True = ConstantInt::getTrue(M.getContext());
    for (auto &&U : NumUnsafeUsesForTypeTest) {
      if (U.second == 0) {
        U.first->replaceAllUsesWith(True);
        U.first->eraseFromParent();
      }
    }
  }

  // Rebuild each global we touched as part of virtual constant propagation to
  // include the before and after bytes.
  if (DidVirtualConstProp)
    for (VTableBits &B : Bits)
      rebuildGlobal(B);

  return true;
}