1 //===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This pass implements whole program optimization of virtual calls in cases
11 // where we know (via !type metadata) that the list of callees is fixed. This
// includes the following (a short worked example follows this list):
13 // - Single implementation devirtualization: if a virtual call has a single
14 // possible callee, replace all calls with a direct call to that callee.
15 // - Virtual constant propagation: if the virtual function's return type is an
16 // integer <=64 bits and all possible callees are readnone, for each class and
17 // each list of constant arguments: evaluate the function, store the return
18 // value alongside the virtual table, and rewrite each virtual call as a load
19 // from the virtual table.
20 // - Uniform return value optimization: if the conditions for virtual constant
21 // propagation hold and each function returns the same constant value, replace
22 // each virtual call with that constant.
23 // - Unique return value optimization for i1 return values: if the conditions
24 // for virtual constant propagation hold and a single vtable's function
25 // returns 0, or a single vtable's function returns 1, replace each virtual
26 // call with a comparison of the vptr against that vtable's address.
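//
// As a rough illustration of the cases above (a sketch, not tied to any
// particular test): given
//
//   struct A { virtual int f() { return 1; } };
//   struct B : A { int f() override { return 1; } };
//
// and a virtual call a->f() guarded by !type metadata for A's vtables, the
// uniform return value optimization can fold the call to the constant 1
// (both implementations are readnone and return the same value). If instead
// only one implementation of f existed in the whole program, single
// implementation devirtualization would turn a->f() into a direct call.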
28 // This pass is intended to be used during the regular and thin LTO pipelines.
29 // During regular LTO, the pass determines the best optimization for each
30 // virtual call and applies the resolutions directly to virtual calls that are
31 // eligible for virtual call optimization (i.e. calls that use either of the
32 // llvm.assume(llvm.type.test) or llvm.type.checked.load intrinsics). During
33 // ThinLTO, the pass operates in two phases:
34 // - Export phase: this is run during the thin link over a single merged module
35 // that contains all vtables with !type metadata that participate in the link.
36 // The pass computes a resolution for each virtual call and stores it in the
37 // type identifier summary.
38 // - Import phase: this is run during the thin backends over the individual
39 // modules. The pass applies the resolutions previously computed during the
// export phase to each eligible virtual call.
42 //===----------------------------------------------------------------------===//
44 #include "llvm/Transforms/IPO/WholeProgramDevirt.h"
45 #include "llvm/ADT/ArrayRef.h"
46 #include "llvm/ADT/DenseMap.h"
47 #include "llvm/ADT/DenseMapInfo.h"
48 #include "llvm/ADT/DenseSet.h"
49 #include "llvm/ADT/MapVector.h"
50 #include "llvm/ADT/SmallVector.h"
51 #include "llvm/ADT/iterator_range.h"
52 #include "llvm/Analysis/AliasAnalysis.h"
53 #include "llvm/Analysis/BasicAliasAnalysis.h"
54 #include "llvm/Analysis/OptimizationRemarkEmitter.h"
55 #include "llvm/Analysis/TypeMetadataUtils.h"
56 #include "llvm/IR/CallSite.h"
57 #include "llvm/IR/Constants.h"
58 #include "llvm/IR/DataLayout.h"
59 #include "llvm/IR/DebugLoc.h"
60 #include "llvm/IR/DerivedTypes.h"
61 #include "llvm/IR/Function.h"
62 #include "llvm/IR/GlobalAlias.h"
63 #include "llvm/IR/GlobalVariable.h"
64 #include "llvm/IR/IRBuilder.h"
65 #include "llvm/IR/InstrTypes.h"
66 #include "llvm/IR/Instruction.h"
67 #include "llvm/IR/Instructions.h"
68 #include "llvm/IR/Intrinsics.h"
69 #include "llvm/IR/LLVMContext.h"
70 #include "llvm/IR/Metadata.h"
71 #include "llvm/IR/Module.h"
72 #include "llvm/IR/ModuleSummaryIndexYAML.h"
73 #include "llvm/Pass.h"
74 #include "llvm/PassRegistry.h"
75 #include "llvm/PassSupport.h"
76 #include "llvm/Support/Casting.h"
77 #include "llvm/Support/Error.h"
78 #include "llvm/Support/FileSystem.h"
79 #include "llvm/Support/MathExtras.h"
80 #include "llvm/Transforms/IPO.h"
81 #include "llvm/Transforms/IPO/FunctionAttrs.h"
82 #include "llvm/Transforms/Utils/Evaluator.h"
using namespace llvm;
using namespace wholeprogramdevirt;
92 #define DEBUG_TYPE "wholeprogramdevirt"
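// The flags below exist to exercise the summary-based (ThinLTO) code paths
// from opt for testing. A typical invocation is roughly (flag names as
// defined below; the exact RUN lines used by the regression tests may
// differ):
//
//   opt -wholeprogramdevirt -wholeprogramdevirt-summary-action=export \
//       -wholeprogramdevirt-write-summary=summary.yaml -S in.ll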
94 static cl::opt<PassSummaryAction> ClSummaryAction(
95 "wholeprogramdevirt-summary-action",
96 cl::desc("What to do with the summary when running this pass"),
97 cl::values(clEnumValN(PassSummaryAction::None, "none", "Do nothing"),
98 clEnumValN(PassSummaryAction::Import, "import",
99 "Import typeid resolutions from summary and globals"),
100 clEnumValN(PassSummaryAction::Export, "export",
101 "Export typeid resolutions to summary and globals")),
104 static cl::opt<std::string> ClReadSummary(
105 "wholeprogramdevirt-read-summary",
106 cl::desc("Read summary from given YAML file before running pass"),
109 static cl::opt<std::string> ClWriteSummary(
110 "wholeprogramdevirt-write-summary",
111 cl::desc("Write summary to given YAML file after running pass"),
114 // Find the minimum offset that we may store a value of size Size bits at. If
// IsAfter is set, look for an offset after the object, otherwise look for an
// offset before the object.
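// For example (illustrative): if the largest vtable forces MinByte to 16 and
// this pass has not yet allocated any bits there, the first call returns
// 16 * 8 == 128, i.e. bit 0 of the byte at offset 16; the result is always a
// bit offset, even when Size is larger than one bit.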
118 wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
119 bool IsAfter, uint64_t Size) {
120 // Find a minimum offset taking into account only vtable sizes.
121 uint64_t MinByte = 0;
  for (const VirtualCallTarget &Target : Targets) {
    if (IsAfter)
      MinByte = std::max(MinByte, Target.minAfterBytes());
    else
      MinByte = std::max(MinByte, Target.minBeforeBytes());
  }
129 // Build a vector of arrays of bytes covering, for each target, a slice of the
130 // used region (see AccumBitVector::BytesUsed in
131 // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte. Effectively,
132 // this aligns the used regions to start at MinByte.
134 // In this example, A, B and C are vtables, # is a byte already allocated for
135 // a virtual function pointer, AAAA... (etc.) are the used regions for the
136 // vtables and Offset(X) is the value computed for the Offset variable below
142 // A: ################AAAAAAAA|AAAAAAAA
143 // B: ########BBBBBBBBBBBBBBBB|BBBB
144 // C: ########################|CCCCCCCCCCCCCCCC
147 // This code produces the slices of A, B and C that appear after the divider
149 std::vector<ArrayRef<uint8_t>> Used;
150 for (const VirtualCallTarget &Target : Targets) {
151 ArrayRef<uint8_t> VTUsed = IsAfter ? Target.TM->Bits->After.BytesUsed
152 : Target.TM->Bits->Before.BytesUsed;
153 uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
154 : MinByte - Target.minBeforeBytes();
156 // Disregard used regions that are smaller than Offset. These are
157 // effectively all-free regions that do not need to be checked.
158 if (VTUsed.size() > Offset)
159 Used.push_back(VTUsed.slice(Offset));
163 // Find a free bit in each member of Used.
164 for (unsigned I = 0;; ++I) {
165 uint8_t BitsUsed = 0;
    for (auto &&B : Used)
      if (I < B.size())
        BitsUsed |= B[I];
    if (BitsUsed != 0xff)
170 return (MinByte + I) * 8 +
171 countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
174 // Find a free (Size/8) byte region in each member of Used.
175 // FIXME: see if alignment helps.
176 for (unsigned I = 0;; ++I) {
177 for (auto &&B : Used) {
179 while ((I + Byte) < B.size() && Byte < (Size / 8)) {
185 return (MinByte + I) * 8;
191 void wholeprogramdevirt::setBeforeReturnValues(
192 MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
193 unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = -(AllocBefore / 8 + 1);
  else
    OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
  OffsetBit = AllocBefore % 8;
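  // Illustrative example: for a 1-bit return value with AllocBefore == 3,
  // OffsetByte is -(3 / 8 + 1) == -1 and OffsetBit is 3, i.e. the value lives
  // in bit 3 of the byte immediately before the address point.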
  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setBeforeBit(AllocBefore);
    else
      Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
  }
}
208 void wholeprogramdevirt::setAfterReturnValues(
209 MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
210 unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = AllocAfter / 8;
  else
    OffsetByte = (AllocAfter + 7) / 8;
  OffsetBit = AllocAfter % 8;
  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setAfterBit(AllocAfter);
    else
      Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
  }
}
225 VirtualCallTarget::VirtualCallTarget(Function *Fn, const TypeMemberInfo *TM)
    : Fn(Fn), TM(TM),
      IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()),
      WasDevirt(false) {}
231 // A slot in a set of virtual tables. The TypeID identifies the set of virtual
232 // tables, and the ByteOffset is the offset in bytes from the address point to
233 // the virtual function pointer.
struct VTableSlot {
  Metadata *TypeID;
  uint64_t ByteOffset;
};

} // end anonymous namespace

namespace llvm {
243 template <> struct DenseMapInfo<VTableSlot> {
244 static VTableSlot getEmptyKey() {
245 return {DenseMapInfo<Metadata *>::getEmptyKey(),
246 DenseMapInfo<uint64_t>::getEmptyKey()};
248 static VTableSlot getTombstoneKey() {
249 return {DenseMapInfo<Metadata *>::getTombstoneKey(),
250 DenseMapInfo<uint64_t>::getTombstoneKey()};
252 static unsigned getHashValue(const VTableSlot &I) {
253 return DenseMapInfo<Metadata *>::getHashValue(I.TypeID) ^
254 DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
256 static bool isEqual(const VTableSlot &LHS,
257 const VTableSlot &RHS) {
258 return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;
262 } // end namespace llvm
266 // A virtual call site. VTable is the loaded virtual table pointer, and CS is
267 // the indirect virtual call.
268 struct VirtualCallSite {
272 // If non-null, this field points to the associated unsafe use count stored in
273 // the DevirtModule::NumUnsafeUsesForTypeTest map below. See the description
274 // of that field for details.
275 unsigned *NumUnsafeUses;
278 emitRemark(const StringRef OptName, const StringRef TargetName,
279 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter) {
280 Function *F = CS.getCaller();
281 DebugLoc DLoc = CS->getDebugLoc();
282 BasicBlock *Block = CS.getParent();
    // In the new pass manager, we can request the optimization remark
    // emitter pass on a per-function basis, which the OREGetter will do
    // for us. In the old pass manager, this is harder, so we just build
    // an optimization remark emitter on the fly when we need it.
289 std::unique_ptr<OptimizationRemarkEmitter> OwnedORE;
290 OptimizationRemarkEmitter *ORE;
294 OwnedORE = make_unique<OptimizationRemarkEmitter>(F);
295 ORE = OwnedORE.get();
299 ORE->emit(OptimizationRemark(DEBUG_TYPE, OptName, DLoc, Block)
300 << NV("Optimization", OptName) << ": devirtualized a call to "
301 << NV("FunctionName", TargetName));
304 void replaceAndErase(
305 const StringRef OptName, const StringRef TargetName, bool RemarksEnabled,
306 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
309 emitRemark(OptName, TargetName, OREGetter);
310 CS->replaceAllUsesWith(New);
311 if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
312 BranchInst::Create(II->getNormalDest(), CS.getInstruction());
313 II->getUnwindDest()->removePredecessor(II->getParent());
315 CS->eraseFromParent();
316 // This use is no longer unsafe.
322 // Call site information collected for a specific VTableSlot and possibly a list
323 // of constant integer arguments. The grouping by arguments is handled by the
324 // VTableSlotInfo class.
325 struct CallSiteInfo {
326 /// The set of call sites for this slot. Used during regular LTO and the
327 /// import phase of ThinLTO (as well as the export phase of ThinLTO for any
328 /// call sites that appear in the merged module itself); in each of these
329 /// cases we are directly operating on the call sites at the IR level.
330 std::vector<VirtualCallSite> CallSites;
332 // These fields are used during the export phase of ThinLTO and reflect
333 // information collected from function summaries.
  /// Whether any function summary contains an llvm.assume(llvm.type.test) for
  /// this slot.
337 bool SummaryHasTypeTestAssumeUsers;
339 /// CFI-specific: a vector containing the list of function summaries that use
340 /// the llvm.type.checked.load intrinsic and therefore will require
341 /// resolutions for llvm.type.test in order to implement CFI checks if
342 /// devirtualization was unsuccessful. If devirtualization was successful, the
343 /// pass will clear this vector by calling markDevirt(). If at the end of the
344 /// pass the vector is non-empty, we will need to add a use of llvm.type.test
345 /// to each of the function summaries in the vector.
346 std::vector<FunctionSummary *> SummaryTypeCheckedLoadUsers;
348 bool isExported() const {
349 return SummaryHasTypeTestAssumeUsers ||
350 !SummaryTypeCheckedLoadUsers.empty();
353 /// As explained in the comment for SummaryTypeCheckedLoadUsers.
354 void markDevirt() { SummaryTypeCheckedLoadUsers.clear(); }
357 // Call site information collected for a specific VTableSlot.
358 struct VTableSlotInfo {
359 // The set of call sites which do not have all constant integer arguments
  // (excluding "this").
  CallSiteInfo CSInfo;
363 // The set of call sites with all constant integer arguments (excluding
364 // "this"), grouped by argument list.
365 std::map<std::vector<uint64_t>, CallSiteInfo> ConstCSInfo;
367 void addCallSite(Value *VTable, CallSite CS, unsigned *NumUnsafeUses);
370 CallSiteInfo &findCallSiteInfo(CallSite CS);
373 CallSiteInfo &VTableSlotInfo::findCallSiteInfo(CallSite CS) {
374 std::vector<uint64_t> Args;
375 auto *CI = dyn_cast<IntegerType>(CS.getType());
  if (!CI || CI->getBitWidth() > 64 || CS.arg_empty())
    return CSInfo;
378 for (auto &&Arg : make_range(CS.arg_begin() + 1, CS.arg_end())) {
379 auto *CI = dyn_cast<ConstantInt>(Arg);
    if (!CI || CI->getBitWidth() > 64)
      return CSInfo;
382 Args.push_back(CI->getZExtValue());
384 return ConstCSInfo[Args];
387 void VTableSlotInfo::addCallSite(Value *VTable, CallSite CS,
388 unsigned *NumUnsafeUses) {
389 findCallSiteInfo(CS).CallSites.push_back({VTable, CS, NumUnsafeUses});
392 struct DevirtModule {
394 function_ref<AAResults &(Function &)> AARGetter;
396 ModuleSummaryIndex *ExportSummary;
397 const ModuleSummaryIndex *ImportSummary;
400 PointerType *Int8PtrTy;
401 IntegerType *Int32Ty;
402 IntegerType *Int64Ty;
403 IntegerType *IntPtrTy;
406 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter;
408 MapVector<VTableSlot, VTableSlotInfo> CallSlots;
410 // This map keeps track of the number of "unsafe" uses of a loaded function
411 // pointer. The key is the associated llvm.type.test intrinsic call generated
412 // by this pass. An unsafe use is one that calls the loaded function pointer
413 // directly. Every time we eliminate an unsafe use (for example, by
414 // devirtualizing it or by applying virtual constant propagation), we
415 // decrement the value stored in this map. If a value reaches zero, we can
  // eliminate the type check by RAUWing the associated llvm.type.test call
  // with true.
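  // For example (illustrative): if one loaded function pointer feeds two
  // indirect calls, the counter for the generated llvm.type.test starts at 2;
  // devirtualizing both calls drops it to 0 and removeRedundantTypeTests()
  // can then fold that llvm.type.test to true and erase it.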
418 std::map<CallInst *, unsigned> NumUnsafeUsesForTypeTest;
420 DevirtModule(Module &M, function_ref<AAResults &(Function &)> AARGetter,
421 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
422 ModuleSummaryIndex *ExportSummary,
423 const ModuleSummaryIndex *ImportSummary)
424 : M(M), AARGetter(AARGetter), ExportSummary(ExportSummary),
425 ImportSummary(ImportSummary), Int8Ty(Type::getInt8Ty(M.getContext())),
426 Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
427 Int32Ty(Type::getInt32Ty(M.getContext())),
428 Int64Ty(Type::getInt64Ty(M.getContext())),
429 IntPtrTy(M.getDataLayout().getIntPtrType(M.getContext(), 0)),
430 RemarksEnabled(areRemarksEnabled()), OREGetter(OREGetter) {
431 assert(!(ExportSummary && ImportSummary));
434 bool areRemarksEnabled();
436 void scanTypeTestUsers(Function *TypeTestFunc, Function *AssumeFunc);
437 void scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc);
439 void buildTypeIdentifierMap(
440 std::vector<VTableBits> &Bits,
441 DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap);
442 Constant *getPointerAtOffset(Constant *I, uint64_t Offset);
444 tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
445 const std::set<TypeMemberInfo> &TypeMemberInfos,
446 uint64_t ByteOffset);
448 void applySingleImplDevirt(VTableSlotInfo &SlotInfo, Constant *TheFn,
450 bool trySingleImplDevirt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
451 VTableSlotInfo &SlotInfo,
452 WholeProgramDevirtResolution *Res);
454 bool tryEvaluateFunctionsWithArgs(
455 MutableArrayRef<VirtualCallTarget> TargetsForSlot,
456 ArrayRef<uint64_t> Args);
458 void applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
460 bool tryUniformRetValOpt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
461 CallSiteInfo &CSInfo,
462 WholeProgramDevirtResolution::ByArg *Res);
464 // Returns the global symbol name that is used to export information about the
465 // given vtable slot and list of arguments.
466 std::string getGlobalName(VTableSlot Slot, ArrayRef<uint64_t> Args,
469 bool shouldExportConstantsAsAbsoluteSymbols();
471 // This function is called during the export phase to create a symbol
  // definition containing information about the given vtable slot and list of
  // arguments.
474 void exportGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args, StringRef Name,
476 void exportConstant(VTableSlot Slot, ArrayRef<uint64_t> Args, StringRef Name,
477 uint32_t Const, uint32_t &Storage);
479 // This function is called during the import phase to create a reference to
480 // the symbol definition created during the export phase.
481 Constant *importGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
483 Constant *importConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
484 StringRef Name, IntegerType *IntTy,
487 void applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName, bool IsOne,
488 Constant *UniqueMemberAddr);
489 bool tryUniqueRetValOpt(unsigned BitWidth,
490 MutableArrayRef<VirtualCallTarget> TargetsForSlot,
491 CallSiteInfo &CSInfo,
492 WholeProgramDevirtResolution::ByArg *Res,
493 VTableSlot Slot, ArrayRef<uint64_t> Args);
495 void applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName,
496 Constant *Byte, Constant *Bit);
497 bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
498 VTableSlotInfo &SlotInfo,
499 WholeProgramDevirtResolution *Res, VTableSlot Slot);
501 void rebuildGlobal(VTableBits &B);
503 // Apply the summary resolution for Slot to all virtual calls in SlotInfo.
504 void importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo);
506 // If we were able to eliminate all unsafe uses for a type checked load,
507 // eliminate the associated type tests by replacing them with true.
508 void removeRedundantTypeTests();
512 // Lower the module using the action and summary passed as command line
513 // arguments. For testing purposes only.
514 static bool runForTesting(
515 Module &M, function_ref<AAResults &(Function &)> AARGetter,
516 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter);
519 struct WholeProgramDevirt : public ModulePass {
522 bool UseCommandLine = false;
524 ModuleSummaryIndex *ExportSummary;
525 const ModuleSummaryIndex *ImportSummary;
527 WholeProgramDevirt() : ModulePass(ID), UseCommandLine(true) {
528 initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
531 WholeProgramDevirt(ModuleSummaryIndex *ExportSummary,
532 const ModuleSummaryIndex *ImportSummary)
533 : ModulePass(ID), ExportSummary(ExportSummary),
534 ImportSummary(ImportSummary) {
535 initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
538 bool runOnModule(Module &M) override {
542 auto OREGetter = function_ref<OptimizationRemarkEmitter &(Function *)>();
    if (UseCommandLine)
      return DevirtModule::runForTesting(M, LegacyAARGetter(*this), OREGetter);
    return DevirtModule(M, LegacyAARGetter(*this), OREGetter, ExportSummary,
                        ImportSummary)
        .run();
  }
552 void getAnalysisUsage(AnalysisUsage &AU) const override {
553 AU.addRequired<AssumptionCacheTracker>();
554 AU.addRequired<TargetLibraryInfoWrapperPass>();
558 } // end anonymous namespace
560 INITIALIZE_PASS_BEGIN(WholeProgramDevirt, "wholeprogramdevirt",
561 "Whole program devirtualization", false, false)
562 INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
563 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
564 INITIALIZE_PASS_END(WholeProgramDevirt, "wholeprogramdevirt",
565 "Whole program devirtualization", false, false)
566 char WholeProgramDevirt::ID = 0;
569 llvm::createWholeProgramDevirtPass(ModuleSummaryIndex *ExportSummary,
570 const ModuleSummaryIndex *ImportSummary) {
571 return new WholeProgramDevirt(ExportSummary, ImportSummary);
574 PreservedAnalyses WholeProgramDevirtPass::run(Module &M,
575 ModuleAnalysisManager &AM) {
576 auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
577 auto AARGetter = [&](Function &F) -> AAResults & {
578 return FAM.getResult<AAManager>(F);
580 auto OREGetter = [&](Function *F) -> OptimizationRemarkEmitter & {
581 return FAM.getResult<OptimizationRemarkEmitterAnalysis>(*F);
583 if (!DevirtModule(M, AARGetter, OREGetter, nullptr, nullptr).run())
584 return PreservedAnalyses::all();
585 return PreservedAnalyses::none();
588 bool DevirtModule::runForTesting(
589 Module &M, function_ref<AAResults &(Function &)> AARGetter,
590 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter) {
591 ModuleSummaryIndex Summary;
593 // Handle the command-line summary arguments. This code is for testing
594 // purposes only, so we handle errors directly.
595 if (!ClReadSummary.empty()) {
596 ExitOnError ExitOnErr("-wholeprogramdevirt-read-summary: " + ClReadSummary +
598 auto ReadSummaryFile =
599 ExitOnErr(errorOrToExpected(MemoryBuffer::getFile(ClReadSummary)));
601 yaml::Input In(ReadSummaryFile->getBuffer());
603 ExitOnErr(errorCodeToError(In.error()));
608 M, AARGetter, OREGetter,
609 ClSummaryAction == PassSummaryAction::Export ? &Summary : nullptr,
610 ClSummaryAction == PassSummaryAction::Import ? &Summary : nullptr)
613 if (!ClWriteSummary.empty()) {
614 ExitOnError ExitOnErr(
615 "-wholeprogramdevirt-write-summary: " + ClWriteSummary + ": ");
617 raw_fd_ostream OS(ClWriteSummary, EC, sys::fs::F_Text);
618 ExitOnErr(errorCodeToError(EC));
620 yaml::Output Out(OS);
627 void DevirtModule::buildTypeIdentifierMap(
628 std::vector<VTableBits> &Bits,
629 DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap) {
630 DenseMap<GlobalVariable *, VTableBits *> GVToBits;
631 Bits.reserve(M.getGlobalList().size());
632 SmallVector<MDNode *, 2> Types;
633 for (GlobalVariable &GV : M.globals()) {
    Types.clear();
    GV.getMetadata(LLVMContext::MD_type, Types);
    if (Types.empty())
      continue;
639 VTableBits *&BitsPtr = GVToBits[&GV];
    if (!BitsPtr) {
      Bits.emplace_back();
      Bits.back().GV = &GV;
643 Bits.back().ObjectSize =
644 M.getDataLayout().getTypeAllocSize(GV.getInitializer()->getType());
      BitsPtr = &Bits.back();
    }
648 for (MDNode *Type : Types) {
649 auto TypeID = Type->getOperand(1).get();
      uint64_t Offset =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
              ->getZExtValue();
656 TypeIdMap[TypeID].insert({BitsPtr, Offset});
661 Constant *DevirtModule::getPointerAtOffset(Constant *I, uint64_t Offset) {
  if (I->getType()->isPointerTy()) {
    if (Offset == 0)
      return I;
    return nullptr;
  }
668 const DataLayout &DL = M.getDataLayout();
670 if (auto *C = dyn_cast<ConstantStruct>(I)) {
671 const StructLayout *SL = DL.getStructLayout(C->getType());
    if (Offset >= SL->getSizeInBytes())
      return nullptr;
675 unsigned Op = SL->getElementContainingOffset(Offset);
676 return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
677 Offset - SL->getElementOffset(Op));
679 if (auto *C = dyn_cast<ConstantArray>(I)) {
680 ArrayType *VTableTy = C->getType();
681 uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());
683 unsigned Op = Offset / ElemSize;
    if (Op >= C->getNumOperands())
      return nullptr;
    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset % ElemSize);
  }

  return nullptr;
}
693 bool DevirtModule::tryFindVirtualCallTargets(
694 std::vector<VirtualCallTarget> &TargetsForSlot,
695 const std::set<TypeMemberInfo> &TypeMemberInfos, uint64_t ByteOffset) {
696 for (const TypeMemberInfo &TM : TypeMemberInfos) {
    if (!TM.Bits->GV->isConstant())
      return false;
700 Constant *Ptr = getPointerAtOffset(TM.Bits->GV->getInitializer(),
                                       TM.Offset + ByteOffset);
    if (!Ptr)
      return false;
    auto Fn = dyn_cast<Function>(Ptr->stripPointerCasts());
    if (!Fn)
      return false;
709 // We can disregard __cxa_pure_virtual as a possible call target, as
710 // calls to pure virtuals are UB.
    if (Fn->getName() == "__cxa_pure_virtual")
      continue;
714 TargetsForSlot.push_back({Fn, &TM});
717 // Give up if we couldn't find any targets.
718 return !TargetsForSlot.empty();
721 void DevirtModule::applySingleImplDevirt(VTableSlotInfo &SlotInfo,
722 Constant *TheFn, bool &IsExported) {
723 auto Apply = [&](CallSiteInfo &CSInfo) {
724 for (auto &&VCallSite : CSInfo.CallSites) {
      if (RemarksEnabled)
        VCallSite.emitRemark("single-impl", TheFn->getName(), OREGetter);
727 VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
728 TheFn, VCallSite.CS.getCalledValue()->getType()));
729 // This use is no longer unsafe.
730 if (VCallSite.NumUnsafeUses)
731 --*VCallSite.NumUnsafeUses;
    if (CSInfo.isExported()) {
      IsExported = true;
      CSInfo.markDevirt();
    }
  };
738 Apply(SlotInfo.CSInfo);
  for (auto &P : SlotInfo.ConstCSInfo)
    Apply(P.second);
}
743 bool DevirtModule::trySingleImplDevirt(
744 MutableArrayRef<VirtualCallTarget> TargetsForSlot,
745 VTableSlotInfo &SlotInfo, WholeProgramDevirtResolution *Res) {
746 // See if the program contains a single implementation of this virtual
748 Function *TheFn = TargetsForSlot[0].Fn;
749 for (auto &&Target : TargetsForSlot)
    if (TheFn != Target.Fn)
      return false;
753 // If so, update each call site to call that implementation directly.
  if (RemarksEnabled)
    TargetsForSlot[0].WasDevirt = true;
757 bool IsExported = false;
  applySingleImplDevirt(SlotInfo, TheFn, IsExported);
  if (!IsExported)
    return false;
762 // If the only implementation has local linkage, we must promote to external
763 // to make it visible to thin LTO objects. We can only get here during the
764 // ThinLTO export phase.
765 if (TheFn->hasLocalLinkage()) {
766 std::string NewName = (TheFn->getName() + "$merged").str();
768 // Since we are renaming the function, any comdats with the same name must
769 // also be renamed. This is required when targeting COFF, as the comdat name
770 // must match one of the names of the symbols in the comdat.
771 if (Comdat *C = TheFn->getComdat()) {
772 if (C->getName() == TheFn->getName()) {
773 Comdat *NewC = M.getOrInsertComdat(NewName);
774 NewC->setSelectionKind(C->getSelectionKind());
775 for (GlobalObject &GO : M.global_objects())
          if (GO.getComdat() == C)
            GO.setComdat(NewC);
781 TheFn->setLinkage(GlobalValue::ExternalLinkage);
782 TheFn->setVisibility(GlobalValue::HiddenVisibility);
783 TheFn->setName(NewName);
786 Res->TheKind = WholeProgramDevirtResolution::SingleImpl;
787 Res->SingleImplName = TheFn->getName();
792 bool DevirtModule::tryEvaluateFunctionsWithArgs(
793 MutableArrayRef<VirtualCallTarget> TargetsForSlot,
794 ArrayRef<uint64_t> Args) {
795 // Evaluate each function and store the result in each target's RetVal
797 for (VirtualCallTarget &Target : TargetsForSlot) {
    if (Target.Fn->arg_size() != Args.size() + 1)
      return false;
801 Evaluator Eval(M.getDataLayout(), nullptr);
802 SmallVector<Constant *, 2> EvalArgs;
    EvalArgs.push_back(
        Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
805 for (unsigned I = 0; I != Args.size(); ++I) {
806 auto *ArgTy = dyn_cast<IntegerType>(
807 Target.Fn->getFunctionType()->getParamType(I + 1));
      if (!ArgTy)
        return false;
      EvalArgs.push_back(ConstantInt::get(ArgTy, Args[I]));
    Constant *RetVal;
    if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
        !isa<ConstantInt>(RetVal))
      return false;
817 Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
822 void DevirtModule::applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
823 uint64_t TheRetVal) {
824 for (auto Call : CSInfo.CallSites)
825 Call.replaceAndErase(
826 "uniform-ret-val", FnName, RemarksEnabled, OREGetter,
827 ConstantInt::get(cast<IntegerType>(Call.CS.getType()), TheRetVal));
831 bool DevirtModule::tryUniformRetValOpt(
832 MutableArrayRef<VirtualCallTarget> TargetsForSlot, CallSiteInfo &CSInfo,
833 WholeProgramDevirtResolution::ByArg *Res) {
834 // Uniform return value optimization. If all functions return the same
835 // constant, replace all calls with that constant.
836 uint64_t TheRetVal = TargetsForSlot[0].RetVal;
837 for (const VirtualCallTarget &Target : TargetsForSlot)
    if (Target.RetVal != TheRetVal)
      return false;
841 if (CSInfo.isExported()) {
842 Res->TheKind = WholeProgramDevirtResolution::ByArg::UniformRetVal;
843 Res->Info = TheRetVal;
846 applyUniformRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), TheRetVal);
848 for (auto &&Target : TargetsForSlot)
849 Target.WasDevirt = true;
853 std::string DevirtModule::getGlobalName(VTableSlot Slot,
854 ArrayRef<uint64_t> Args,
856 std::string FullName = "__typeid_";
857 raw_string_ostream OS(FullName);
858 OS << cast<MDString>(Slot.TypeID)->getString() << '_' << Slot.ByteOffset;
  for (uint64_t Arg : Args)
    OS << '_' << Arg;
  OS << '_' << Name;
  return OS.str();
}
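// For illustration (assuming the formatting above), a slot for type id
// "_ZTS1A" at byte offset 0 with the constant argument list {1} and Name
// "byte" yields a symbol named roughly "__typeid__ZTS1A_0_1_byte".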
865 bool DevirtModule::shouldExportConstantsAsAbsoluteSymbols() {
866 Triple T(M.getTargetTriple());
867 return (T.getArch() == Triple::x86 || T.getArch() == Triple::x86_64) &&
868 T.getObjectFormat() == Triple::ELF;
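// On other object formats the exported constants are not emitted as absolute
// symbols; they are instead carried in the summary via the Storage
// out-parameter of exportConstant() and read back by importConstant() below.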
871 void DevirtModule::exportGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
872 StringRef Name, Constant *C) {
873 GlobalAlias *GA = GlobalAlias::create(Int8Ty, 0, GlobalValue::ExternalLinkage,
874 getGlobalName(Slot, Args, Name), C, &M);
875 GA->setVisibility(GlobalValue::HiddenVisibility);
878 void DevirtModule::exportConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
879 StringRef Name, uint32_t Const,
881 if (shouldExportConstantsAsAbsoluteSymbols()) {
884 ConstantExpr::getIntToPtr(ConstantInt::get(Int32Ty, Const), Int8PtrTy));
891 Constant *DevirtModule::importGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
893 Constant *C = M.getOrInsertGlobal(getGlobalName(Slot, Args, Name), Int8Ty);
894 auto *GV = dyn_cast<GlobalVariable>(C);
896 GV->setVisibility(GlobalValue::HiddenVisibility);
900 Constant *DevirtModule::importConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
901 StringRef Name, IntegerType *IntTy,
903 if (!shouldExportConstantsAsAbsoluteSymbols())
904 return ConstantInt::get(IntTy, Storage);
906 Constant *C = importGlobal(Slot, Args, Name);
907 auto *GV = cast<GlobalVariable>(C->stripPointerCasts());
908 C = ConstantExpr::getPtrToInt(C, IntTy);
910 // We only need to set metadata if the global is newly created, in which
911 // case it would not have hidden visibility.
912 if (GV->getMetadata(LLVMContext::MD_absolute_symbol))
915 auto SetAbsRange = [&](uint64_t Min, uint64_t Max) {
916 auto *MinC = ConstantAsMetadata::get(ConstantInt::get(IntPtrTy, Min));
917 auto *MaxC = ConstantAsMetadata::get(ConstantInt::get(IntPtrTy, Max));
918 GV->setMetadata(LLVMContext::MD_absolute_symbol,
919 MDNode::get(M.getContext(), {MinC, MaxC}));
921 unsigned AbsWidth = IntTy->getBitWidth();
922 if (AbsWidth == IntPtrTy->getBitWidth())
923 SetAbsRange(~0ull, ~0ull); // Full set.
  else
    SetAbsRange(0, 1ull << AbsWidth);
  return C;
}
929 void DevirtModule::applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
931 Constant *UniqueMemberAddr) {
932 for (auto &&Call : CSInfo.CallSites) {
933 IRBuilder<> B(Call.CS.getInstruction());
935 B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
936 B.CreateBitCast(Call.VTable, Int8PtrTy), UniqueMemberAddr);
937 Cmp = B.CreateZExt(Cmp, Call.CS->getType());
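    // The rewrite is roughly:
    //   %cmp = icmp eq i8* %vtable, <unique member address>   ; or icmp ne
    //   %ret = zext i1 %cmp to <call's return type>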
938 Call.replaceAndErase("unique-ret-val", FnName, RemarksEnabled, OREGetter,
944 bool DevirtModule::tryUniqueRetValOpt(
945 unsigned BitWidth, MutableArrayRef<VirtualCallTarget> TargetsForSlot,
946 CallSiteInfo &CSInfo, WholeProgramDevirtResolution::ByArg *Res,
947 VTableSlot Slot, ArrayRef<uint64_t> Args) {
948 // IsOne controls whether we look for a 0 or a 1.
949 auto tryUniqueRetValOptFor = [&](bool IsOne) {
950 const TypeMemberInfo *UniqueMember = nullptr;
951 for (const VirtualCallTarget &Target : TargetsForSlot) {
952 if (Target.RetVal == (IsOne ? 1 : 0)) {
        if (UniqueMember)
          return false;
        UniqueMember = Target.TM;
959 // We should have found a unique member or bailed out by now. We already
960 // checked for a uniform return value in tryUniformRetValOpt.
961 assert(UniqueMember);
963 Constant *UniqueMemberAddr =
964 ConstantExpr::getBitCast(UniqueMember->Bits->GV, Int8PtrTy);
965 UniqueMemberAddr = ConstantExpr::getGetElementPtr(
966 Int8Ty, UniqueMemberAddr,
967 ConstantInt::get(Int64Ty, UniqueMember->Offset));
969 if (CSInfo.isExported()) {
      Res->TheKind = WholeProgramDevirtResolution::ByArg::UniqueRetVal;
      Res->Info = IsOne;
973 exportGlobal(Slot, Args, "unique_member", UniqueMemberAddr);
976 // Replace each call with the comparison.
977 applyUniqueRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), IsOne,
980 // Update devirtualization statistics for targets.
982 for (auto &&Target : TargetsForSlot)
983 Target.WasDevirt = true;
  if (tryUniqueRetValOptFor(true))
    return true;
  if (tryUniqueRetValOptFor(false))
    return true;

  return false;
}
997 void DevirtModule::applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName,
998 Constant *Byte, Constant *Bit) {
999 for (auto Call : CSInfo.CallSites) {
1000 auto *RetType = cast<IntegerType>(Call.CS.getType());
1001 IRBuilder<> B(Call.CS.getInstruction());
1003 B.CreateGEP(Int8Ty, B.CreateBitCast(Call.VTable, Int8PtrTy), Byte);
1004 if (RetType->getBitWidth() == 1) {
1005 Value *Bits = B.CreateLoad(Addr);
1006 Value *BitsAndBit = B.CreateAnd(Bits, Bit);
1007 auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
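      // Roughly:
      //   %bits = load i8, i8* %addr
      //   %and  = and i8 %bits, <Bit>
      //   %ret  = icmp ne i8 %and, 0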
1008 Call.replaceAndErase("virtual-const-prop-1-bit", FnName, RemarksEnabled,
1009 OREGetter, IsBitSet);
1011 Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
1012 Value *Val = B.CreateLoad(RetType, ValAddr);
1013 Call.replaceAndErase("virtual-const-prop", FnName, RemarksEnabled,
1017 CSInfo.markDevirt();
1020 bool DevirtModule::tryVirtualConstProp(
1021 MutableArrayRef<VirtualCallTarget> TargetsForSlot, VTableSlotInfo &SlotInfo,
1022 WholeProgramDevirtResolution *Res, VTableSlot Slot) {
1023 // This only works if the function returns an integer.
  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
  if (!RetType)
    return false;
  unsigned BitWidth = RetType->getBitWidth();
  if (BitWidth > 64)
    return false;
1031 // Make sure that each function is defined, does not access memory, takes at
1032 // least one argument, does not use its first argument (which we assume is
1033 // 'this'), and has the same return type.
1035 // Note that we test whether this copy of the function is readnone, rather
1036 // than testing function attributes, which must hold for any copy of the
1037 // function, even a less optimized version substituted at link time. This is
1038 // sound because the virtual constant propagation optimizations effectively
1039 // inline all implementations of the virtual function into each call site,
1040 // rather than using function attributes to perform local optimization.
1041 for (VirtualCallTarget &Target : TargetsForSlot) {
1042 if (Target.Fn->isDeclaration() ||
        computeFunctionBodyMemoryAccess(*Target.Fn, AARGetter(*Target.Fn)) !=
            MAK_ReadNone ||
        Target.Fn->arg_empty() || !Target.Fn->arg_begin()->use_empty() ||
        Target.Fn->getReturnType() != RetType)
      return false;
  }
1050 for (auto &&CSByConstantArg : SlotInfo.ConstCSInfo) {
    if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
      continue;
1054 WholeProgramDevirtResolution::ByArg *ResByArg = nullptr;
    if (Res)
      ResByArg = &Res->ResByArg[CSByConstantArg.first];
    if (tryUniformRetValOpt(TargetsForSlot, CSByConstantArg.second, ResByArg))
      continue;
1061 if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second,
                           ResByArg, Slot, CSByConstantArg.first))
      continue;
    // Find an allocation offset in bits in all vtables associated with the
    // type.
1067 uint64_t AllocBefore =
1068 findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
1069 uint64_t AllocAfter =
1070 findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);
1072 // Calculate the total amount of padding needed to store a value at both
1073 // ends of the object.
1074 uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
1075 for (auto &&Target : TargetsForSlot) {
1076 TotalPaddingBefore += std::max<int64_t>(
1077 (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
1078 TotalPaddingAfter += std::max<int64_t>(
1079 (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
1082 // If the amount of padding is too large, give up.
1083 // FIXME: do something smarter here.
1084 if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
1087 // Calculate the offset to the value as a (possibly negative) byte offset
1088 // and (if applicable) a bit offset, and store the values in the targets.
1091 if (TotalPaddingBefore <= TotalPaddingAfter)
1092 setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
1095 setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
1099 for (auto &&Target : TargetsForSlot)
1100 Target.WasDevirt = true;
1103 if (CSByConstantArg.second.isExported()) {
1104 ResByArg->TheKind = WholeProgramDevirtResolution::ByArg::VirtualConstProp;
1105 exportConstant(Slot, CSByConstantArg.first, "byte", OffsetByte,
1107 exportConstant(Slot, CSByConstantArg.first, "bit", 1ULL << OffsetBit,
1111 // Rewrite each call to a load from OffsetByte/OffsetBit.
1112 Constant *ByteConst = ConstantInt::get(Int32Ty, OffsetByte);
1113 Constant *BitConst = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
1114 applyVirtualConstProp(CSByConstantArg.second,
1115 TargetsForSlot[0].Fn->getName(), ByteConst, BitConst);
1120 void DevirtModule::rebuildGlobal(VTableBits &B) {
1121 if (B.Before.Bytes.empty() && B.After.Bytes.empty())
1124 // Align each byte array to pointer width.
1125 unsigned PointerSize = M.getDataLayout().getPointerSize();
1126 B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
1127 B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));
1129 // Before was stored in reverse order; flip it now.
1130 for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
1131 std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);
1133 // Build an anonymous global containing the before bytes, followed by the
1134 // original initializer, followed by the after bytes.
1135 auto NewInit = ConstantStruct::getAnon(
1136 {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
1137 B.GV->getInitializer(),
1138 ConstantDataArray::get(M.getContext(), B.After.Bytes)});
1140 new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
1141 GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
1142 NewGV->setSection(B.GV->getSection());
1143 NewGV->setComdat(B.GV->getComdat());
1145 // Copy the original vtable's metadata to the anonymous global, adjusting
1146 // offsets as required.
1147 NewGV->copyMetadata(B.GV, B.Before.Bytes.size());
1149 // Build an alias named after the original global, pointing at the second
1150 // element (the original initializer).
1151 auto Alias = GlobalAlias::create(
1152 B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
1153 ConstantExpr::getGetElementPtr(
1154 NewInit->getType(), NewGV,
1155 ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
1156 ConstantInt::get(Int32Ty, 1)}),
1158 Alias->setVisibility(B.GV->getVisibility());
1159 Alias->takeName(B.GV);
1161 B.GV->replaceAllUsesWith(Alias);
1162 B.GV->eraseFromParent();
1165 bool DevirtModule::areRemarksEnabled() {
  const auto &FL = M.getFunctionList();
  if (FL.empty())
    return false;
1169 const Function &Fn = FL.front();
  const auto &BBL = Fn.getBasicBlockList();
  if (BBL.empty())
    return false;
1174 auto DI = OptimizationRemark(DEBUG_TYPE, "", DebugLoc(), &BBL.front());
1175 return DI.isEnabled();
1178 void DevirtModule::scanTypeTestUsers(Function *TypeTestFunc,
1179 Function *AssumeFunc) {
1180 // Find all virtual calls via a virtual table pointer %p under an assumption
1181 // of the form llvm.assume(llvm.type.test(%p, %md)). This indicates that %p
1182 // points to a member of the type identifier %md. Group calls by (type ID,
  // offset) pair (effectively the identity of the virtual function) and store
  // to CallSlots.
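  // The pattern being matched is roughly (with "_ZTS1A" standing in for
  // whatever type identifier the front end emitted):
  //   %vtable = load i8*, i8** %obj
  //   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
  //   call void @llvm.assume(i1 %p)
  //   ... followed by an indirect call through a pointer loaded from %vtable.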
1185 DenseSet<Value *> SeenPtrs;
  for (auto I = TypeTestFunc->use_begin(), E = TypeTestFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;
1193 // Search for virtual calls based on %p and add them to DevirtCalls.
1194 SmallVector<DevirtCallSite, 1> DevirtCalls;
1195 SmallVector<CallInst *, 1> Assumes;
1196 findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI);
1198 // If we found any, add them to CallSlots. Only do this if we haven't seen
1199 // the vtable pointer before, as it may have been CSE'd with pointers from
1200 // other call sites, and we don't want to process call sites multiple times.
1201 if (!Assumes.empty()) {
1203 cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
1204 Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
1205 if (SeenPtrs.insert(Ptr).second) {
1206 for (DevirtCallSite Call : DevirtCalls) {
1207 CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CS, nullptr);
1212 // We no longer need the assumes or the type test.
1213 for (auto Assume : Assumes)
1214 Assume->eraseFromParent();
1215 // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
1216 // may use the vtable argument later.
1217 if (CI->use_empty())
1218 CI->eraseFromParent();
1222 void DevirtModule::scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc) {
1223 Function *TypeTestFunc = Intrinsic::getDeclaration(&M, Intrinsic::type_test);
1225 for (auto I = TypeCheckedLoadFunc->use_begin(),
1226 E = TypeCheckedLoadFunc->use_end();
1228 auto CI = dyn_cast<CallInst>(I->getUser());
1233 Value *Ptr = CI->getArgOperand(0);
1234 Value *Offset = CI->getArgOperand(1);
1235 Value *TypeIdValue = CI->getArgOperand(2);
1236 Metadata *TypeId = cast<MetadataAsValue>(TypeIdValue)->getMetadata();
1238 SmallVector<DevirtCallSite, 1> DevirtCalls;
1239 SmallVector<Instruction *, 1> LoadedPtrs;
1240 SmallVector<Instruction *, 1> Preds;
1241 bool HasNonCallUses = false;
1242 findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
1243 HasNonCallUses, CI);
1245 // Start by generating "pessimistic" code that explicitly loads the function
1246 // pointer from the vtable and performs the type check. If possible, we will
1247 // eliminate the load and the type check later.
1249 // If possible, only generate the load at the point where it is used.
1250 // This helps avoid unnecessary spills.
1252 (LoadedPtrs.size() == 1 && !HasNonCallUses) ? LoadedPtrs[0] : CI);
1253 Value *GEP = LoadB.CreateGEP(Int8Ty, Ptr, Offset);
1254 Value *GEPPtr = LoadB.CreateBitCast(GEP, PointerType::getUnqual(Int8PtrTy));
1255 Value *LoadedValue = LoadB.CreateLoad(Int8PtrTy, GEPPtr);
1257 for (Instruction *LoadedPtr : LoadedPtrs) {
1258 LoadedPtr->replaceAllUsesWith(LoadedValue);
1259 LoadedPtr->eraseFromParent();
1262 // Likewise for the type test.
1263 IRBuilder<> CallB((Preds.size() == 1 && !HasNonCallUses) ? Preds[0] : CI);
1264 CallInst *TypeTestCall = CallB.CreateCall(TypeTestFunc, {Ptr, TypeIdValue});
1266 for (Instruction *Pred : Preds) {
1267 Pred->replaceAllUsesWith(TypeTestCall);
1268 Pred->eraseFromParent();
1271 // We have already erased any extractvalue instructions that refer to the
1272 // intrinsic call, but the intrinsic may have other non-extractvalue uses
  // (although this is unlikely). In that case, explicitly build a pair and
  // RAUW it.
1275 if (!CI->use_empty()) {
1276 Value *Pair = UndefValue::get(CI->getType());
1278 Pair = B.CreateInsertValue(Pair, LoadedValue, {0});
1279 Pair = B.CreateInsertValue(Pair, TypeTestCall, {1});
1280 CI->replaceAllUsesWith(Pair);
1283 // The number of unsafe uses is initially the number of uses.
1284 auto &NumUnsafeUses = NumUnsafeUsesForTypeTest[TypeTestCall];
1285 NumUnsafeUses = DevirtCalls.size();
1287 // If the function pointer has a non-call user, we cannot eliminate the type
1288 // check, as one of those users may eventually call the pointer. Increment
1289 // the unsafe use count to make sure it cannot reach zero.
1292 for (DevirtCallSite Call : DevirtCalls) {
1293 CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CS,
1297 CI->eraseFromParent();
1301 void DevirtModule::importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo) {
1302 const TypeIdSummary *TidSummary =
1303 ImportSummary->getTypeIdSummary(cast<MDString>(Slot.TypeID)->getString());
1306 auto ResI = TidSummary->WPDRes.find(Slot.ByteOffset);
1307 if (ResI == TidSummary->WPDRes.end())
1309 const WholeProgramDevirtResolution &Res = ResI->second;
1311 if (Res.TheKind == WholeProgramDevirtResolution::SingleImpl) {
1312 // The type of the function in the declaration is irrelevant because every
1313 // call site will cast it to the correct type.
1314 auto *SingleImpl = M.getOrInsertFunction(
1315 Res.SingleImplName, Type::getVoidTy(M.getContext()));
1317 // This is the import phase so we should not be exporting anything.
1318 bool IsExported = false;
1319 applySingleImplDevirt(SlotInfo, SingleImpl, IsExported);
1320 assert(!IsExported);
1323 for (auto &CSByConstantArg : SlotInfo.ConstCSInfo) {
1324 auto I = Res.ResByArg.find(CSByConstantArg.first);
1325 if (I == Res.ResByArg.end())
1327 auto &ResByArg = I->second;
1328 // FIXME: We should figure out what to do about the "function name" argument
1329 // to the apply* functions, as the function names are unavailable during the
1330 // importing phase. For now we just pass the empty string. This does not
1331 // impact correctness because the function names are just used for remarks.
1332 switch (ResByArg.TheKind) {
1333 case WholeProgramDevirtResolution::ByArg::UniformRetVal:
1334 applyUniformRetValOpt(CSByConstantArg.second, "", ResByArg.Info);
1336 case WholeProgramDevirtResolution::ByArg::UniqueRetVal: {
1337 Constant *UniqueMemberAddr =
1338 importGlobal(Slot, CSByConstantArg.first, "unique_member");
1339 applyUniqueRetValOpt(CSByConstantArg.second, "", ResByArg.Info,
1343 case WholeProgramDevirtResolution::ByArg::VirtualConstProp: {
1344 Constant *Byte = importConstant(Slot, CSByConstantArg.first, "byte",
1345 Int32Ty, ResByArg.Byte);
1346 Constant *Bit = importConstant(Slot, CSByConstantArg.first, "bit", Int8Ty,
1348 applyVirtualConstProp(CSByConstantArg.second, "", Byte, Bit);
1357 void DevirtModule::removeRedundantTypeTests() {
1358 auto True = ConstantInt::getTrue(M.getContext());
1359 for (auto &&U : NumUnsafeUsesForTypeTest) {
1360 if (U.second == 0) {
1361 U.first->replaceAllUsesWith(True);
1362 U.first->eraseFromParent();
1367 bool DevirtModule::run() {
1368 Function *TypeTestFunc =
1369 M.getFunction(Intrinsic::getName(Intrinsic::type_test));
1370 Function *TypeCheckedLoadFunc =
1371 M.getFunction(Intrinsic::getName(Intrinsic::type_checked_load));
1372 Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));
1374 // Normally if there are no users of the devirtualization intrinsics in the
1375 // module, this pass has nothing to do. But if we are exporting, we also need
1376 // to handle any users that appear only in the function summaries.
1377 if (!ExportSummary &&
1378 (!TypeTestFunc || TypeTestFunc->use_empty() || !AssumeFunc ||
1379 AssumeFunc->use_empty()) &&
      (!TypeCheckedLoadFunc || TypeCheckedLoadFunc->use_empty()))
    return false;
1383 if (TypeTestFunc && AssumeFunc)
1384 scanTypeTestUsers(TypeTestFunc, AssumeFunc);
1386 if (TypeCheckedLoadFunc)
1387 scanTypeCheckedLoadUsers(TypeCheckedLoadFunc);
1389 if (ImportSummary) {
1390 for (auto &S : CallSlots)
1391 importResolution(S.first, S.second);
1393 removeRedundantTypeTests();
1395 // The rest of the code is only necessary when exporting or during regular
    // LTO, so we are done.
    return true;
  }
1400 // Rebuild type metadata into a map for easy lookup.
1401 std::vector<VTableBits> Bits;
1402 DenseMap<Metadata *, std::set<TypeMemberInfo>> TypeIdMap;
1403 buildTypeIdentifierMap(Bits, TypeIdMap);
  if (TypeIdMap.empty())
    return true;
1407 // Collect information from summary about which calls to try to devirtualize.
1408 if (ExportSummary) {
1409 DenseMap<GlobalValue::GUID, TinyPtrVector<Metadata *>> MetadataByGUID;
1410 for (auto &P : TypeIdMap) {
1411 if (auto *TypeId = dyn_cast<MDString>(P.first))
1412 MetadataByGUID[GlobalValue::getGUID(TypeId->getString())].push_back(
1416 for (auto &P : *ExportSummary) {
1417 for (auto &S : P.second.SummaryList) {
        auto *FS = dyn_cast<FunctionSummary>(S.get());
        if (!FS)
          continue;
1421 // FIXME: Only add live functions.
1422 for (FunctionSummary::VFuncId VF : FS->type_test_assume_vcalls()) {
1423 for (Metadata *MD : MetadataByGUID[VF.GUID]) {
1424 CallSlots[{MD, VF.Offset}].CSInfo.SummaryHasTypeTestAssumeUsers =
1428 for (FunctionSummary::VFuncId VF : FS->type_checked_load_vcalls()) {
1429 for (Metadata *MD : MetadataByGUID[VF.GUID]) {
1430 CallSlots[{MD, VF.Offset}]
1431 .CSInfo.SummaryTypeCheckedLoadUsers.push_back(FS);
1434 for (const FunctionSummary::ConstVCall &VC :
1435 FS->type_test_assume_const_vcalls()) {
1436 for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
1437 CallSlots[{MD, VC.VFunc.Offset}]
1438 .ConstCSInfo[VC.Args]
1439 .SummaryHasTypeTestAssumeUsers = true;
1442 for (const FunctionSummary::ConstVCall &VC :
1443 FS->type_checked_load_const_vcalls()) {
1444 for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
1445 CallSlots[{MD, VC.VFunc.Offset}]
1446 .ConstCSInfo[VC.Args]
1447 .SummaryTypeCheckedLoadUsers.push_back(FS);
1454 // For each (type, offset) pair:
1455 bool DidVirtualConstProp = false;
1456 std::map<std::string, Function*> DevirtTargets;
1457 for (auto &S : CallSlots) {
1458 // Search each of the members of the type identifier for the virtual
    // function implementation at offset S.first.ByteOffset, and add to
    // TargetsForSlot.
1461 std::vector<VirtualCallTarget> TargetsForSlot;
1462 if (tryFindVirtualCallTargets(TargetsForSlot, TypeIdMap[S.first.TypeID],
1463 S.first.ByteOffset)) {
1464 WholeProgramDevirtResolution *Res = nullptr;
1465 if (ExportSummary && isa<MDString>(S.first.TypeID))
1466 Res = &ExportSummary
1467 ->getOrInsertTypeIdSummary(
1468 cast<MDString>(S.first.TypeID)->getString())
1469 .WPDRes[S.first.ByteOffset];
1471 if (!trySingleImplDevirt(TargetsForSlot, S.second, Res) &&
1472 tryVirtualConstProp(TargetsForSlot, S.second, Res, S.first))
1473 DidVirtualConstProp = true;
1475 // Collect functions devirtualized at least for one call site for stats.
      if (RemarksEnabled)
        for (const auto &T : TargetsForSlot)
          if (T.WasDevirt)
            DevirtTargets[T.Fn->getName()] = T.Fn;
1482 // CFI-specific: if we are exporting and any llvm.type.checked.load
1483 // intrinsics were *not* devirtualized, we need to add the resulting
1484 // llvm.type.test intrinsics to the function summaries so that the
1485 // LowerTypeTests pass will export them.
1486 if (ExportSummary && isa<MDString>(S.first.TypeID)) {
1488 GlobalValue::getGUID(cast<MDString>(S.first.TypeID)->getString());
1489 for (auto FS : S.second.CSInfo.SummaryTypeCheckedLoadUsers)
1490 FS->addTypeTest(GUID);
1491 for (auto &CCS : S.second.ConstCSInfo)
1492 for (auto FS : CCS.second.SummaryTypeCheckedLoadUsers)
1493 FS->addTypeTest(GUID);
1497 if (RemarksEnabled) {
1498 // Generate remarks for each devirtualized function.
1499 for (const auto &DT : DevirtTargets) {
1500 Function *F = DT.second;
      // In the new pass manager, we can request the optimization remark
      // emitter pass on a per-function basis, which the OREGetter will do
      // for us. In the old pass manager, this is harder, so we just build
      // an optimization remark emitter on the fly when we need it.
1507 std::unique_ptr<OptimizationRemarkEmitter> OwnedORE;
1508 OptimizationRemarkEmitter *ORE;
1510 ORE = &OREGetter(F);
1512 OwnedORE = make_unique<OptimizationRemarkEmitter>(F);
1513 ORE = OwnedORE.get();
1516 using namespace ore;
1517 ORE->emit(OptimizationRemark(DEBUG_TYPE, "Devirtualized", F)
1518 << "devirtualized " << NV("FunctionName", F->getName()));
1522 removeRedundantTypeTests();
1524 // Rebuild each global we touched as part of virtual constant propagation to
1525 // include the before and after bytes.
1526 if (DidVirtualConstProp)
1527 for (VTableBits &B : Bits)