//- CFLAndersAliasAnalysis.cpp - Inclusion-based Alias Analysis ----*- C++ -*-//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file implements a CFL-based, summary-based alias analysis algorithm. It
11 // differs from CFLSteensAliasAnalysis in its inclusion-based nature while
12 // CFLSteensAliasAnalysis is unification-based. This pass has worse performance
13 // than CFLSteensAliasAnalysis (the worst case complexity of
14 // CFLAndersAliasAnalysis is cubic, while the worst case complexity of
15 // CFLSteensAliasAnalysis is almost linear), but it is able to yield more
16 // precise analysis result. The precision of this analysis is roughly the same
// as that of a one-level context-sensitive Andersen's algorithm.
19 // The algorithm used here is based on recursive state machine matching scheme
20 // proposed in "Demand-driven alias analysis for C" by Xin Zheng and Radu
// Rugina. The general idea is to extend the traditional transitive closure
22 // algorithm to perform CFL matching along the way: instead of recording
23 // "whether X is reachable from Y", we keep track of "whether X is reachable
24 // from Y at state Z", where the "state" field indicates where we are in the CFL
25 // matching process. To understand the matching better, it is advisable to have
26 // the state machine shown in Figure 3 of the paper available when reading the
27 // codes: all we do here is to selectively expand the transitive closure by
28 // discarding edges that are not recognized by the state machine.
30 // There are two differences between our current implementation and the one
31 // described in the paper:
32 // - Our algorithm eagerly computes all alias pairs after the CFLGraph is built,
33 // while in the paper the authors did the computation in a demand-driven
34 // fashion. We did not implement the demand-driven algorithm due to the
35 // additional coding complexity and higher memory profile, but if we found it
36 // necessary we may switch to it eventually.
37 // - In the paper the authors use a state machine that does not distinguish
38 // value reads from value writes. For example, if Y is reachable from X at state
39 // S3, it may be the case that X is written into Y, or it may be the case that
40 // there's a third value Z that writes into both X and Y. To make that
41 // distinction (which is crucial in building function summary as well as
42 // retrieving mod-ref info), we choose to duplicate some of the states in the
43 // paper's proposed state machine. The duplication does not change the set the
44 // machine accepts. Given a pair of reachable values, it only provides more
45 // detailed information on which value is being written into and which is being
48 //===----------------------------------------------------------------------===//
50 // N.B. AliasAnalysis as a whole is phrased as a FunctionPass at the moment, and
51 // CFLAndersAA is interprocedural. This is *technically* A Bad Thing, because
52 // FunctionPasses are only allowed to inspect the Function that they're being
53 // run on. Realistically, this likely isn't a problem until we allow
54 // FunctionPasses to run concurrently.
#include "llvm/Analysis/CFLAndersAliasAnalysis.h"
#include "llvm/Analysis/CFLGraph.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/Pass.h"

using namespace llvm;
using namespace llvm::cflaa;
64 #define DEBUG_TYPE "cfl-anders-aa"
66 CFLAndersAAResult::CFLAndersAAResult(const TargetLibraryInfo &TLI) : TLI(TLI) {}
67 CFLAndersAAResult::CFLAndersAAResult(CFLAndersAAResult &&RHS)
68 : AAResultBase(std::move(RHS)), TLI(RHS.TLI) {}
69 CFLAndersAAResult::~CFLAndersAAResult() {}
71 static const Function *parentFunctionOfValue(const Value *Val) {
72 if (auto *Inst = dyn_cast<Instruction>(Val)) {
73 auto *Bb = Inst->getParent();
74 return Bb->getParent();
77 if (auto *Arg = dyn_cast<Argument>(Val))
78 return Arg->getParent();
/// The states of the CFL-matching state machine (Figure 3 of Zheng & Rugina),
/// refined so that read-only and write-only alias paths are distinguishable.
enum class MatchState : uint8_t {
  // The following state represents S1 in the paper.
  FlowFromReadOnly = 0,
  // The following two states together represent S2 in the paper.
  // The 'NoReadWrite' suffix indicates that there exists an alias path that
  // does not contain assignment and reverse assignment edges.
  // The 'ReadOnly' suffix indicates that there exists an alias path that
  // contains reverse assignment edges only.
  FlowFromMemAliasNoReadWrite,
  FlowFromMemAliasReadOnly,
  // The following two states together represent S3 in the paper.
  // The 'WriteOnly' suffix indicates that there exists an alias path that
  // contains assignment edges only.
  // The 'ReadWrite' suffix indicates that there exists an alias path that
  // contains both assignment and reverse assignment edges. Note that if X and Y
  // are reachable at 'ReadWrite' state, it does NOT mean X is both read from
  // and written to Y. Instead, it means that a third value Z is written to both
  // X and Y.
  FlowToWriteOnly,
  FlowToReadWrite,
  // The following two states together represent S4 in the paper.
  FlowToMemAliasWriteOnly,
  FlowToMemAliasReadWrite,
};

// One bit per MatchState: records at which states a value pair was reached.
typedef std::bitset<7> StateSet;
// States that prove a reverse-assignment-only (read-only) alias path exists.
const unsigned ReadOnlyStateMask =
    (1U << static_cast<uint8_t>(MatchState::FlowFromReadOnly)) |
    (1U << static_cast<uint8_t>(MatchState::FlowFromMemAliasReadOnly));
// States that prove an assignment-only (write-only) alias path exists.
const unsigned WriteOnlyStateMask =
    (1U << static_cast<uint8_t>(MatchState::FlowToWriteOnly)) |
    (1U << static_cast<uint8_t>(MatchState::FlowToMemAliasWriteOnly));
117 // A pair that consists of a value and an offset
123 bool operator==(OffsetValue LHS, OffsetValue RHS) {
124 return LHS.Val == RHS.Val && LHS.Offset == RHS.Offset;
126 bool operator<(OffsetValue LHS, OffsetValue RHS) {
127 return std::less<const Value *>()(LHS.Val, RHS.Val) ||
128 (LHS.Val == RHS.Val && LHS.Offset < RHS.Offset);
131 // A pair that consists of an InstantiatedValue and an offset
132 struct OffsetInstantiatedValue {
133 InstantiatedValue IVal;
137 bool operator==(OffsetInstantiatedValue LHS, OffsetInstantiatedValue RHS) {
138 return LHS.IVal == RHS.IVal && LHS.Offset == RHS.Offset;
141 // We use ReachabilitySet to keep track of value aliases (The nonterminal "V" in
142 // the paper) during the analysis.
143 class ReachabilitySet {
144 typedef DenseMap<InstantiatedValue, StateSet> ValueStateMap;
145 typedef DenseMap<InstantiatedValue, ValueStateMap> ValueReachMap;
146 ValueReachMap ReachMap;
149 typedef ValueStateMap::const_iterator const_valuestate_iterator;
150 typedef ValueReachMap::const_iterator const_value_iterator;
152 // Insert edge 'From->To' at state 'State'
153 bool insert(InstantiatedValue From, InstantiatedValue To, MatchState State) {
155 auto &States = ReachMap[To][From];
156 auto Idx = static_cast<size_t>(State);
157 if (!States.test(Idx)) {
164 // Return the set of all ('From', 'State') pair for a given node 'To'
165 iterator_range<const_valuestate_iterator>
166 reachableValueAliases(InstantiatedValue V) const {
167 auto Itr = ReachMap.find(V);
168 if (Itr == ReachMap.end())
169 return make_range<const_valuestate_iterator>(const_valuestate_iterator(),
170 const_valuestate_iterator());
171 return make_range<const_valuestate_iterator>(Itr->second.begin(),
175 iterator_range<const_value_iterator> value_mappings() const {
176 return make_range<const_value_iterator>(ReachMap.begin(), ReachMap.end());
180 // We use AliasMemSet to keep track of all memory aliases (the nonterminal "M"
181 // in the paper) during the analysis.
183 typedef DenseSet<InstantiatedValue> MemSet;
184 typedef DenseMap<InstantiatedValue, MemSet> MemMapType;
188 typedef MemSet::const_iterator const_mem_iterator;
190 bool insert(InstantiatedValue LHS, InstantiatedValue RHS) {
191 // Top-level values can never be memory aliases because one cannot take the
193 assert(LHS.DerefLevel > 0 && RHS.DerefLevel > 0);
194 return MemMap[LHS].insert(RHS).second;
197 const MemSet *getMemoryAliases(InstantiatedValue V) const {
198 auto Itr = MemMap.find(V);
199 if (Itr == MemMap.end())
205 // We use AliasAttrMap to keep track of the AliasAttr of each node.
207 typedef DenseMap<InstantiatedValue, AliasAttrs> MapType;
211 typedef MapType::const_iterator const_iterator;
213 bool add(InstantiatedValue V, AliasAttrs Attr) {
214 auto &OldAttr = AttrMap[V];
215 auto NewAttr = OldAttr | Attr;
216 if (OldAttr == NewAttr)
222 AliasAttrs getAttrs(InstantiatedValue V) const {
224 auto Itr = AttrMap.find(V);
225 if (Itr != AttrMap.end())
230 iterator_range<const_iterator> mappings() const {
231 return make_range<const_iterator>(AttrMap.begin(), AttrMap.end());
235 struct WorkListItem {
236 InstantiatedValue From;
237 InstantiatedValue To;
241 struct ValueSummary {
243 InterfaceValue IValue;
246 SmallVector<Record, 4> FromRecords, ToRecords;
251 // Specialize DenseMapInfo for OffsetValue.
252 template <> struct DenseMapInfo<OffsetValue> {
253 static OffsetValue getEmptyKey() {
254 return OffsetValue{DenseMapInfo<const Value *>::getEmptyKey(),
255 DenseMapInfo<int64_t>::getEmptyKey()};
257 static OffsetValue getTombstoneKey() {
258 return OffsetValue{DenseMapInfo<const Value *>::getTombstoneKey(),
259 DenseMapInfo<int64_t>::getEmptyKey()};
261 static unsigned getHashValue(const OffsetValue &OVal) {
262 return DenseMapInfo<std::pair<const Value *, int64_t>>::getHashValue(
263 std::make_pair(OVal.Val, OVal.Offset));
265 static bool isEqual(const OffsetValue &LHS, const OffsetValue &RHS) {
270 // Specialize DenseMapInfo for OffsetInstantiatedValue.
271 template <> struct DenseMapInfo<OffsetInstantiatedValue> {
272 static OffsetInstantiatedValue getEmptyKey() {
273 return OffsetInstantiatedValue{
274 DenseMapInfo<InstantiatedValue>::getEmptyKey(),
275 DenseMapInfo<int64_t>::getEmptyKey()};
277 static OffsetInstantiatedValue getTombstoneKey() {
278 return OffsetInstantiatedValue{
279 DenseMapInfo<InstantiatedValue>::getTombstoneKey(),
280 DenseMapInfo<int64_t>::getEmptyKey()};
282 static unsigned getHashValue(const OffsetInstantiatedValue &OVal) {
283 return DenseMapInfo<std::pair<InstantiatedValue, int64_t>>::getHashValue(
284 std::make_pair(OVal.IVal, OVal.Offset));
286 static bool isEqual(const OffsetInstantiatedValue &LHS,
287 const OffsetInstantiatedValue &RHS) {
293 class CFLAndersAAResult::FunctionInfo {
294 /// Map a value to other values that may alias it
295 /// Since the alias relation is symmetric, to save some space we assume values
296 /// are properly ordered: if a and b alias each other, and a < b, then b is in
297 /// AliasMap[a] but not vice versa.
298 DenseMap<const Value *, std::vector<OffsetValue>> AliasMap;
300 /// Map a value to its corresponding AliasAttrs
301 DenseMap<const Value *, AliasAttrs> AttrMap;
303 /// Summary of externally visible effects.
304 AliasSummary Summary;
306 Optional<AliasAttrs> getAttrs(const Value *) const;
309 FunctionInfo(const Function &, const SmallVectorImpl<Value *> &,
310 const ReachabilitySet &, const AliasAttrMap &);
312 bool mayAlias(const Value *, uint64_t, const Value *, uint64_t) const;
313 const AliasSummary &getAliasSummary() const { return Summary; }
316 static bool hasReadOnlyState(StateSet Set) {
317 return (Set & StateSet(ReadOnlyStateMask)).any();
320 static bool hasWriteOnlyState(StateSet Set) {
321 return (Set & StateSet(WriteOnlyStateMask)).any();
324 static Optional<InterfaceValue>
325 getInterfaceValue(InstantiatedValue IValue,
326 const SmallVectorImpl<Value *> &RetVals) {
327 auto Val = IValue.Val;
329 Optional<unsigned> Index;
330 if (auto Arg = dyn_cast<Argument>(Val))
331 Index = Arg->getArgNo() + 1;
332 else if (is_contained(RetVals, Val))
336 return InterfaceValue{*Index, IValue.DerefLevel};
340 static void populateAttrMap(DenseMap<const Value *, AliasAttrs> &AttrMap,
341 const AliasAttrMap &AMap) {
342 for (const auto &Mapping : AMap.mappings()) {
343 auto IVal = Mapping.first;
345 // Insert IVal into the map
346 auto &Attr = AttrMap[IVal.Val];
347 // AttrMap only cares about top-level values
348 if (IVal.DerefLevel == 0)
349 Attr |= Mapping.second;
354 populateAliasMap(DenseMap<const Value *, std::vector<OffsetValue>> &AliasMap,
355 const ReachabilitySet &ReachSet) {
356 for (const auto &OuterMapping : ReachSet.value_mappings()) {
357 // AliasMap only cares about top-level values
358 if (OuterMapping.first.DerefLevel > 0)
361 auto Val = OuterMapping.first.Val;
362 auto &AliasList = AliasMap[Val];
363 for (const auto &InnerMapping : OuterMapping.second) {
364 // Again, AliasMap only cares about top-level values
365 if (InnerMapping.first.DerefLevel == 0)
366 AliasList.push_back(OffsetValue{InnerMapping.first.Val, UnknownOffset});
369 // Sort AliasList for faster lookup
370 std::sort(AliasList.begin(), AliasList.end());
374 static void populateExternalRelations(
375 SmallVectorImpl<ExternalRelation> &ExtRelations, const Function &Fn,
376 const SmallVectorImpl<Value *> &RetVals, const ReachabilitySet &ReachSet) {
377 // If a function only returns one of its argument X, then X will be both an
378 // argument and a return value at the same time. This is an edge case that
379 // needs special handling here.
380 for (const auto &Arg : Fn.args()) {
381 if (is_contained(RetVals, &Arg)) {
382 auto ArgVal = InterfaceValue{Arg.getArgNo() + 1, 0};
383 auto RetVal = InterfaceValue{0, 0};
384 ExtRelations.push_back(ExternalRelation{ArgVal, RetVal, 0});
388 // Below is the core summary construction logic.
389 // A naive solution of adding only the value aliases that are parameters or
390 // return values in ReachSet to the summary won't work: It is possible that a
391 // parameter P is written into an intermediate value I, and the function
392 // subsequently returns *I. In that case, *I is does not value alias anything
393 // in ReachSet, and the naive solution will miss a summary edge from (P, 1) to
395 // To account for the aforementioned case, we need to check each non-parameter
396 // and non-return value for the possibility of acting as an intermediate.
397 // 'ValueMap' here records, for each value, which InterfaceValues read from or
398 // write into it. If both the read list and the write list of a given value
399 // are non-empty, we know that a particular value is an intermidate and we
400 // need to add summary edges from the writes to the reads.
401 DenseMap<Value *, ValueSummary> ValueMap;
402 for (const auto &OuterMapping : ReachSet.value_mappings()) {
403 if (auto Dst = getInterfaceValue(OuterMapping.first, RetVals)) {
404 for (const auto &InnerMapping : OuterMapping.second) {
405 // If Src is a param/return value, we get a same-level assignment.
406 if (auto Src = getInterfaceValue(InnerMapping.first, RetVals)) {
407 // This may happen if both Dst and Src are return values
411 if (hasReadOnlyState(InnerMapping.second))
412 ExtRelations.push_back(ExternalRelation{*Dst, *Src, UnknownOffset});
413 // No need to check for WriteOnly state, since ReachSet is symmetric
415 // If Src is not a param/return, add it to ValueMap
416 auto SrcIVal = InnerMapping.first;
417 if (hasReadOnlyState(InnerMapping.second))
418 ValueMap[SrcIVal.Val].FromRecords.push_back(
419 ValueSummary::Record{*Dst, SrcIVal.DerefLevel});
420 if (hasWriteOnlyState(InnerMapping.second))
421 ValueMap[SrcIVal.Val].ToRecords.push_back(
422 ValueSummary::Record{*Dst, SrcIVal.DerefLevel});
428 for (const auto &Mapping : ValueMap) {
429 for (const auto &FromRecord : Mapping.second.FromRecords) {
430 for (const auto &ToRecord : Mapping.second.ToRecords) {
431 auto ToLevel = ToRecord.DerefLevel;
432 auto FromLevel = FromRecord.DerefLevel;
433 // Same-level assignments should have already been processed by now
434 if (ToLevel == FromLevel)
437 auto SrcIndex = FromRecord.IValue.Index;
438 auto SrcLevel = FromRecord.IValue.DerefLevel;
439 auto DstIndex = ToRecord.IValue.Index;
440 auto DstLevel = ToRecord.IValue.DerefLevel;
441 if (ToLevel > FromLevel)
442 SrcLevel += ToLevel - FromLevel;
444 DstLevel += FromLevel - ToLevel;
446 ExtRelations.push_back(ExternalRelation{
447 InterfaceValue{SrcIndex, SrcLevel},
448 InterfaceValue{DstIndex, DstLevel}, UnknownOffset});
453 // Remove duplicates in ExtRelations
454 std::sort(ExtRelations.begin(), ExtRelations.end());
455 ExtRelations.erase(std::unique(ExtRelations.begin(), ExtRelations.end()),
459 static void populateExternalAttributes(
460 SmallVectorImpl<ExternalAttribute> &ExtAttributes, const Function &Fn,
461 const SmallVectorImpl<Value *> &RetVals, const AliasAttrMap &AMap) {
462 for (const auto &Mapping : AMap.mappings()) {
463 if (auto IVal = getInterfaceValue(Mapping.first, RetVals)) {
464 auto Attr = getExternallyVisibleAttrs(Mapping.second);
466 ExtAttributes.push_back(ExternalAttribute{*IVal, Attr});
471 CFLAndersAAResult::FunctionInfo::FunctionInfo(
472 const Function &Fn, const SmallVectorImpl<Value *> &RetVals,
473 const ReachabilitySet &ReachSet, const AliasAttrMap &AMap) {
474 populateAttrMap(AttrMap, AMap);
475 populateExternalAttributes(Summary.RetParamAttributes, Fn, RetVals, AMap);
476 populateAliasMap(AliasMap, ReachSet);
477 populateExternalRelations(Summary.RetParamRelations, Fn, RetVals, ReachSet);
481 CFLAndersAAResult::FunctionInfo::getAttrs(const Value *V) const {
482 assert(V != nullptr);
484 auto Itr = AttrMap.find(V);
485 if (Itr != AttrMap.end())
490 bool CFLAndersAAResult::FunctionInfo::mayAlias(const Value *LHS,
493 uint64_t RHSSize) const {
496 // Check if we've seen LHS and RHS before. Sometimes LHS or RHS can be created
497 // after the analysis gets executed, and we want to be conservative in those
499 auto MaybeAttrsA = getAttrs(LHS);
500 auto MaybeAttrsB = getAttrs(RHS);
501 if (!MaybeAttrsA || !MaybeAttrsB)
504 // Check AliasAttrs before AliasMap lookup since it's cheaper
505 auto AttrsA = *MaybeAttrsA;
506 auto AttrsB = *MaybeAttrsB;
507 if (hasUnknownOrCallerAttr(AttrsA))
509 if (hasUnknownOrCallerAttr(AttrsB))
511 if (isGlobalOrArgAttr(AttrsA))
512 return isGlobalOrArgAttr(AttrsB);
513 if (isGlobalOrArgAttr(AttrsB))
514 return isGlobalOrArgAttr(AttrsA);
516 // At this point both LHS and RHS should point to locally allocated objects
518 auto Itr = AliasMap.find(LHS);
519 if (Itr != AliasMap.end()) {
521 // Find out all (X, Offset) where X == RHS
522 auto Comparator = [](OffsetValue LHS, OffsetValue RHS) {
523 return std::less<const Value *>()(LHS.Val, RHS.Val);
525 #ifdef EXPENSIVE_CHECKS
526 assert(std::is_sorted(Itr->second.begin(), Itr->second.end(), Comparator));
528 auto RangePair = std::equal_range(Itr->second.begin(), Itr->second.end(),
529 OffsetValue{RHS, 0}, Comparator);
531 if (RangePair.first != RangePair.second) {
532 // Be conservative about UnknownSize
533 if (LHSSize == MemoryLocation::UnknownSize ||
534 RHSSize == MemoryLocation::UnknownSize)
537 for (const auto &OVal : make_range(RangePair)) {
538 // Be conservative about UnknownOffset
539 if (OVal.Offset == UnknownOffset)
542 // We know that LHS aliases (RHS + OVal.Offset) if the control flow
543 // reaches here. The may-alias query essentially becomes integer
544 // range-overlap queries over two ranges [OVal.Offset, OVal.Offset +
545 // LHSSize) and [0, RHSSize).
547 // Try to be conservative on super large offsets
548 if (LLVM_UNLIKELY(LHSSize > INT64_MAX || RHSSize > INT64_MAX))
551 auto LHSStart = OVal.Offset;
552 // FIXME: Do we need to guard against integer overflow?
553 auto LHSEnd = OVal.Offset + static_cast<int64_t>(LHSSize);
555 auto RHSEnd = static_cast<int64_t>(RHSSize);
556 if (LHSEnd > RHSStart && LHSStart < RHSEnd)
565 static void propagate(InstantiatedValue From, InstantiatedValue To,
566 MatchState State, ReachabilitySet &ReachSet,
567 std::vector<WorkListItem> &WorkList) {
570 if (ReachSet.insert(From, To, State))
571 WorkList.push_back(WorkListItem{From, To, State});
574 static void initializeWorkList(std::vector<WorkListItem> &WorkList,
575 ReachabilitySet &ReachSet,
576 const CFLGraph &Graph) {
577 for (const auto &Mapping : Graph.value_mappings()) {
578 auto Val = Mapping.first;
579 auto &ValueInfo = Mapping.second;
580 assert(ValueInfo.getNumLevels() > 0);
582 // Insert all immediate assignment neighbors to the worklist
583 for (unsigned I = 0, E = ValueInfo.getNumLevels(); I < E; ++I) {
584 auto Src = InstantiatedValue{Val, I};
585 // If there's an assignment edge from X to Y, it means Y is reachable from
586 // X at S2 and X is reachable from Y at S1
587 for (auto &Edge : ValueInfo.getNodeInfoAtLevel(I).Edges) {
588 propagate(Edge.Other, Src, MatchState::FlowFromReadOnly, ReachSet,
590 propagate(Src, Edge.Other, MatchState::FlowToWriteOnly, ReachSet,
597 static Optional<InstantiatedValue> getNodeBelow(const CFLGraph &Graph,
598 InstantiatedValue V) {
599 auto NodeBelow = InstantiatedValue{V.Val, V.DerefLevel + 1};
600 if (Graph.getNode(NodeBelow))
605 static void processWorkListItem(const WorkListItem &Item, const CFLGraph &Graph,
606 ReachabilitySet &ReachSet, AliasMemSet &MemSet,
607 std::vector<WorkListItem> &WorkList) {
608 auto FromNode = Item.From;
609 auto ToNode = Item.To;
611 auto NodeInfo = Graph.getNode(ToNode);
612 assert(NodeInfo != nullptr);
614 // TODO: propagate field offsets
616 // FIXME: Here is a neat trick we can do: since both ReachSet and MemSet holds
617 // relations that are symmetric, we could actually cut the storage by half by
618 // sorting FromNode and ToNode before insertion happens.
620 // The newly added value alias pair may pontentially generate more memory
621 // alias pairs. Check for them here.
622 auto FromNodeBelow = getNodeBelow(Graph, FromNode);
623 auto ToNodeBelow = getNodeBelow(Graph, ToNode);
624 if (FromNodeBelow && ToNodeBelow &&
625 MemSet.insert(*FromNodeBelow, *ToNodeBelow)) {
626 propagate(*FromNodeBelow, *ToNodeBelow,
627 MatchState::FlowFromMemAliasNoReadWrite, ReachSet, WorkList);
628 for (const auto &Mapping : ReachSet.reachableValueAliases(*FromNodeBelow)) {
629 auto Src = Mapping.first;
630 auto MemAliasPropagate = [&](MatchState FromState, MatchState ToState) {
631 if (Mapping.second.test(static_cast<size_t>(FromState)))
632 propagate(Src, *ToNodeBelow, ToState, ReachSet, WorkList);
635 MemAliasPropagate(MatchState::FlowFromReadOnly,
636 MatchState::FlowFromMemAliasReadOnly);
637 MemAliasPropagate(MatchState::FlowToWriteOnly,
638 MatchState::FlowToMemAliasWriteOnly);
639 MemAliasPropagate(MatchState::FlowToReadWrite,
640 MatchState::FlowToMemAliasReadWrite);
644 // This is the core of the state machine walking algorithm. We expand ReachSet
645 // based on which state we are at (which in turn dictates what edges we
647 // From a high-level point of view, the state machine here guarantees two
649 // - If *X and *Y are memory aliases, then X and Y are value aliases
650 // - If Y is an alias of X, then reverse assignment edges (if there is any)
651 // should precede any assignment edges on the path from X to Y.
652 auto NextAssignState = [&](MatchState State) {
653 for (const auto &AssignEdge : NodeInfo->Edges)
654 propagate(FromNode, AssignEdge.Other, State, ReachSet, WorkList);
656 auto NextRevAssignState = [&](MatchState State) {
657 for (const auto &RevAssignEdge : NodeInfo->ReverseEdges)
658 propagate(FromNode, RevAssignEdge.Other, State, ReachSet, WorkList);
660 auto NextMemState = [&](MatchState State) {
661 if (auto AliasSet = MemSet.getMemoryAliases(ToNode)) {
662 for (const auto &MemAlias : *AliasSet)
663 propagate(FromNode, MemAlias, State, ReachSet, WorkList);
667 switch (Item.State) {
668 case MatchState::FlowFromReadOnly: {
669 NextRevAssignState(MatchState::FlowFromReadOnly);
670 NextAssignState(MatchState::FlowToReadWrite);
671 NextMemState(MatchState::FlowFromMemAliasReadOnly);
674 case MatchState::FlowFromMemAliasNoReadWrite: {
675 NextRevAssignState(MatchState::FlowFromReadOnly);
676 NextAssignState(MatchState::FlowToWriteOnly);
679 case MatchState::FlowFromMemAliasReadOnly: {
680 NextRevAssignState(MatchState::FlowFromReadOnly);
681 NextAssignState(MatchState::FlowToReadWrite);
684 case MatchState::FlowToWriteOnly: {
685 NextAssignState(MatchState::FlowToWriteOnly);
686 NextMemState(MatchState::FlowToMemAliasWriteOnly);
689 case MatchState::FlowToReadWrite: {
690 NextAssignState(MatchState::FlowToReadWrite);
691 NextMemState(MatchState::FlowToMemAliasReadWrite);
694 case MatchState::FlowToMemAliasWriteOnly: {
695 NextAssignState(MatchState::FlowToWriteOnly);
698 case MatchState::FlowToMemAliasReadWrite: {
699 NextAssignState(MatchState::FlowToReadWrite);
705 static AliasAttrMap buildAttrMap(const CFLGraph &Graph,
706 const ReachabilitySet &ReachSet) {
707 AliasAttrMap AttrMap;
708 std::vector<InstantiatedValue> WorkList, NextList;
710 // Initialize each node with its original AliasAttrs in CFLGraph
711 for (const auto &Mapping : Graph.value_mappings()) {
712 auto Val = Mapping.first;
713 auto &ValueInfo = Mapping.second;
714 for (unsigned I = 0, E = ValueInfo.getNumLevels(); I < E; ++I) {
715 auto Node = InstantiatedValue{Val, I};
716 AttrMap.add(Node, ValueInfo.getNodeInfoAtLevel(I).Attr);
717 WorkList.push_back(Node);
721 while (!WorkList.empty()) {
722 for (const auto &Dst : WorkList) {
723 auto DstAttr = AttrMap.getAttrs(Dst);
727 // Propagate attr on the same level
728 for (const auto &Mapping : ReachSet.reachableValueAliases(Dst)) {
729 auto Src = Mapping.first;
730 if (AttrMap.add(Src, DstAttr))
731 NextList.push_back(Src);
734 // Propagate attr to the levels below
735 auto DstBelow = getNodeBelow(Graph, Dst);
737 if (AttrMap.add(*DstBelow, DstAttr)) {
738 NextList.push_back(*DstBelow);
741 DstBelow = getNodeBelow(Graph, *DstBelow);
744 WorkList.swap(NextList);
751 CFLAndersAAResult::FunctionInfo
752 CFLAndersAAResult::buildInfoFrom(const Function &Fn) {
753 CFLGraphBuilder<CFLAndersAAResult> GraphBuilder(
755 // Cast away the constness here due to GraphBuilder's API requirement
756 const_cast<Function &>(Fn));
757 auto &Graph = GraphBuilder.getCFLGraph();
759 ReachabilitySet ReachSet;
762 std::vector<WorkListItem> WorkList, NextList;
763 initializeWorkList(WorkList, ReachSet, Graph);
764 // TODO: make sure we don't stop before the fix point is reached
765 while (!WorkList.empty()) {
766 for (const auto &Item : WorkList)
767 processWorkListItem(Item, Graph, ReachSet, MemSet, NextList);
769 NextList.swap(WorkList);
773 // Now that we have all the reachability info, propagate AliasAttrs according
775 auto IValueAttrMap = buildAttrMap(Graph, ReachSet);
777 return FunctionInfo(Fn, GraphBuilder.getReturnValues(), ReachSet,
778 std::move(IValueAttrMap));
781 void CFLAndersAAResult::scan(const Function &Fn) {
782 auto InsertPair = Cache.insert(std::make_pair(&Fn, Optional<FunctionInfo>()));
784 assert(InsertPair.second &&
785 "Trying to scan a function that has already been cached");
787 // Note that we can't do Cache[Fn] = buildSetsFrom(Fn) here: the function call
788 // may get evaluated after operator[], potentially triggering a DenseMap
789 // resize and invalidating the reference returned by operator[]
790 auto FunInfo = buildInfoFrom(Fn);
791 Cache[&Fn] = std::move(FunInfo);
792 Handles.push_front(FunctionHandle(const_cast<Function *>(&Fn), this));
795 void CFLAndersAAResult::evict(const Function &Fn) { Cache.erase(&Fn); }
797 const Optional<CFLAndersAAResult::FunctionInfo> &
798 CFLAndersAAResult::ensureCached(const Function &Fn) {
799 auto Iter = Cache.find(&Fn);
800 if (Iter == Cache.end()) {
802 Iter = Cache.find(&Fn);
803 assert(Iter != Cache.end());
804 assert(Iter->second.hasValue());
809 const AliasSummary *CFLAndersAAResult::getAliasSummary(const Function &Fn) {
810 auto &FunInfo = ensureCached(Fn);
811 if (FunInfo.hasValue())
812 return &FunInfo->getAliasSummary();
817 AliasResult CFLAndersAAResult::query(const MemoryLocation &LocA,
818 const MemoryLocation &LocB) {
819 auto *ValA = LocA.Ptr;
820 auto *ValB = LocB.Ptr;
822 if (!ValA->getType()->isPointerTy() || !ValB->getType()->isPointerTy())
825 auto *Fn = parentFunctionOfValue(ValA);
827 Fn = parentFunctionOfValue(ValB);
829 // The only times this is known to happen are when globals + InlineAsm are
832 << "CFLAndersAA: could not extract parent function information.\n");
836 assert(!parentFunctionOfValue(ValB) || parentFunctionOfValue(ValB) == Fn);
839 assert(Fn != nullptr);
840 auto &FunInfo = ensureCached(*Fn);
843 if (FunInfo->mayAlias(ValA, LocA.Size, ValB, LocB.Size))
848 AliasResult CFLAndersAAResult::alias(const MemoryLocation &LocA,
849 const MemoryLocation &LocB) {
850 if (LocA.Ptr == LocB.Ptr)
851 return LocA.Size == LocB.Size ? MustAlias : PartialAlias;
853 // Comparisons between global variables and other constants should be
854 // handled by BasicAA.
855 // CFLAndersAA may report NoAlias when comparing a GlobalValue and
856 // ConstantExpr, but every query needs to have at least one Value tied to a
857 // Function, and neither GlobalValues nor ConstantExprs are.
858 if (isa<Constant>(LocA.Ptr) && isa<Constant>(LocB.Ptr))
859 return AAResultBase::alias(LocA, LocB);
861 AliasResult QueryResult = query(LocA, LocB);
862 if (QueryResult == MayAlias)
863 return AAResultBase::alias(LocA, LocB);
868 AnalysisKey CFLAndersAA::Key;
870 CFLAndersAAResult CFLAndersAA::run(Function &F, FunctionAnalysisManager &AM) {
871 return CFLAndersAAResult(AM.getResult<TargetLibraryAnalysis>(F));
874 char CFLAndersAAWrapperPass::ID = 0;
875 INITIALIZE_PASS(CFLAndersAAWrapperPass, "cfl-anders-aa",
876 "Inclusion-Based CFL Alias Analysis", false, true)
878 ImmutablePass *llvm::createCFLAndersAAWrapperPass() {
879 return new CFLAndersAAWrapperPass();
882 CFLAndersAAWrapperPass::CFLAndersAAWrapperPass() : ImmutablePass(ID) {
883 initializeCFLAndersAAWrapperPassPass(*PassRegistry::getPassRegistry());
886 void CFLAndersAAWrapperPass::initializePass() {
887 auto &TLIWP = getAnalysis<TargetLibraryInfoWrapperPass>();
888 Result.reset(new CFLAndersAAResult(TLIWP.getTLI()));
891 void CFLAndersAAWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
892 AU.setPreservesAll();
893 AU.addRequired<TargetLibraryInfoWrapperPass>();