//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines ExprEngine's support for calls and returns.
//
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "ExprEngine"

#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/ParentMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace ento;
STATISTIC(NumOfDynamicDispatchPathSplits,
  "The # of times we split the path due to imprecise dynamic dispatch info");

STATISTIC(NumInlinedCalls,
  "The # of times we inlined a call");
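
/// Enter an inlined call: create a node at the entry edge of the callee's CFG,
/// carrying the caller's state, and put it on the worklist so the callee body
/// is analyzed next.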
void ExprEngine::processCallEnter(CallEnter CE, ExplodedNode *Pred) {
  // Get the entry block in the CFG of the callee.
  const StackFrameContext *calleeCtx = CE.getCalleeContext();
  const CFG *CalleeCFG = calleeCtx->getCFG();
  const CFGBlock *Entry = &(CalleeCFG->getEntry());

  // Validate the CFG.
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *Succ = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, Succ, calleeCtx);

  ProgramStateRef state = Pred->getState();

  // Construct a new node and add it to the worklist.
  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
  Node->addPredecessor(Pred, G);
  if (isNew)
    Engine.getWorkList()->enqueue(Node);
}
// Find the last statement on the path to the exploded node and the
// corresponding Block.
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = 0;
  const StackFrameContext *SF =
          Node->getLocation().getLocationContext()->getCurrentStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getLocationContext()->getCurrentStackFrame() == SF) {
      if (const StmtPoint *SP = dyn_cast<StmtPoint>(&PP)) {
        S = SP->getStmt();
        break;
      } else if (const CallExitEnd *CEE = dyn_cast<CallExitEnd>(&PP)) {
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        const CallEnter *CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      }
    } else if (const CallEnter *CE = dyn_cast<CallEnter>(&PP)) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    if (Node->pred_empty())
      return std::pair<const Stmt*, const CFGBlock*>((Stmt*)0, (CFGBlock*)0);

    Node = *Node->pred_begin();
  }

  const CFGBlock *Blk = 0;

  // Now, get the enclosing basic block.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();
    if (isa<BlockEdge>(PP) &&
        (PP.getLocationContext()->getCurrentStackFrame() == SF)) {
      const BlockEdge &EPP = cast<BlockEdge>(PP);
      Blk = EPP.getDst();
      break;
    }
    if (Node->pred_empty())
      return std::pair<const Stmt*, const CFGBlock*>(S, (CFGBlock*)0);

    Node = *Node->pred_begin();
  }

  return std::pair<const Stmt*, const CFGBlock*>(S, Blk);
}
/// Adjusts a return value when the called function's return type does not
/// match the caller's expression type. This can happen when a dynamic call
/// is devirtualized, and the overriding method has a covariant (more specific)
/// return type than the parent's method. For C++ objects, this means we need
/// to add base casts.
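///
/// For example, if 'Derived *Derived::clone()' overrides 'Base *Base::clone()'
/// and a call through a 'Base *' is devirtualized to the Derived override, the
/// returned value has type 'Derived *' and must be cast back to 'Base *' to
/// match the type of the call expression in the caller.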
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
                              StoreManager &StoreMgr) {
  // For now, the only adjustments we handle apply only to locations.
  if (!isa<Loc>(V))
    return V;

  // If the types already match, don't do any unnecessary work.
  ExpectedTy = ExpectedTy.getCanonicalType();
  ActualTy = ActualTy.getCanonicalType();
  if (ExpectedTy == ActualTy)
    return V;

  // No adjustment is needed between Objective-C pointer types.
  if (ExpectedTy->isObjCObjectPointerType() &&
      ActualTy->isObjCObjectPointerType())
    return V;

  // C++ object pointers may need "derived-to-base" casts.
  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
  if (ExpectedClass && ActualClass) {
    CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
                       /*DetectVirtual=*/false);
    if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
        !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
      return StoreMgr.evalDerivedToBase(V, Paths.front());
    }
  }

  // Unfortunately, Objective-C does not enforce that overridden methods have
  // covariant return types, so we can't assert that this never happens.
  // Be safe and return UnknownVal().
  return UnknownVal();
}
void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
                                           ExplodedNode *Pred,
                                           ExplodedNodeSet &Dst) {
  NodeBuilder Bldr(Pred, Dst, BC);

  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = 0;
  const CFGBlock *Blk = 0;
  llvm::tie(LastSt, Blk) = getLastStmt(Pred);
  if (!Blk || !LastSt) {
    Dst.Add(Pred);
    return;
  }

  // If the last statement is a return, everything it references should stay
  // live.
  if (isa<ReturnStmt>(LastSt))
    return;

  // Here, we call the Symbol Reaper with 0 stack context, telling it to clean
  // up everything on the stack. We use LastSt as a diagnostic statement, with
  // which the PostStmtPurgeDeadSymbols point will be associated.
  removeDead(Pred, Dst, 0, 0, LastSt,
             ProgramPoint::PostStmtPurgeDeadSymbolsKind);
}
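
/// Returns true if the function that was actually inlined (the runtime
/// definition recorded in the callee's stack frame) is a different declaration
/// than the one the call expression names statically, e.g. because a dynamic
/// call was devirtualized to an override.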
static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
                                            const StackFrameContext *calleeCtx) {
  const Decl *RuntimeCallee = calleeCtx->getDecl();
  const Decl *StaticDecl = Call->getDecl();
  assert(RuntimeCallee);
  if (!StaticDecl)
    return true;
  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
}
/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of the call exit sequence)
/// 2. Bind the return value
/// 3. Run RemoveDeadBindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd (switch to the caller context)
/// 5. PostStmt<CallExpr>
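///
/// In graph terms, the sequence is:
///   CEBNode -> BindedRetNode -> CleanedNodes -> CEENode -> Dst -> WorkList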
void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
  // Step 1: CEBNode was generated before the call.

  const StackFrameContext *calleeCtx =
      CEBNode->getLocationContext()->getCurrentStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *callerCtx =
    calleeCtx->getParent()->getCurrentStackFrame();

  const Stmt *CE = calleeCtx->getCallSite();
  ProgramStateRef state = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = 0;
  const CFGBlock *Blk = 0;
  llvm::tie(LastSt, Blk) = getLastStmt(CEBNode);

  // Generate a CallEvent /before/ cleaning the state, so that we can get the
  // correct value for 'this' (if necessary).
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);

  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      SVal V = state->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
        QualType ReturnedTy =
          CallEvent::getDeclaredResultType(calleeCtx->getDecl());
        if (!ReturnedTy.isNull()) {
          if (const Expr *Ex = dyn_cast<Expr>(CE)) {
            V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
                                  getStoreManager());
          }
        }
      }

      state = state->BindExpr(CE, callerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      loc::MemRegionVal This =
        svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
      SVal ThisV = state->getSVal(This);

      // If the constructed object is a prvalue, get its bindings.
      // Note that we have to be careful here because constructors embedded
      // in DeclStmts are not marked as lvalues.
      if (!CCE->isGLValue())
        if (const MemRegion *MR = ThisV.getAsRegion())
          if (isa<CXXTempObjectRegion>(MR))
            ThisV = state->getSVal(cast<Loc>(ThisV));

      state = state->BindExpr(CCE, callerCtx, ThisV);
    }
  }

  // Step 3: BindedRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag retValBind("ExprEngine : Bind Return Value");
    PostStmt Loc(LastSt, calleeCtx, &retValBind);
    bool isNew;
    ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
    BindedRetNode->addPredecessor(CEBNode, G);
    if (!isNew)
      return;

    NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
    currBldrCtx = &Ctx;
    // Here, we call the Symbol Reaper with 0 statement and the caller location
    // context, telling it to clean up everything in the callee's context
    // (and its children). We use LastSt as a diagnostic statement, with which
    // the PostStmtPurgeDeadSymbols point will be associated.
    removeDead(BindedRetNode, CleanedNodes, 0, callerCtx, LastSt,
               ProgramPoint::PostStmtPurgeDeadSymbolsKind);
    currBldrCtx = 0;
  } else {
    CleanedNodes.Add(CEBNode);
  }

  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
                                 E = CleanedNodes.end(); I != E; ++I) {

    // Step 4: Generate the CallExit and leave the callee's context.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(calleeCtx, callerCtx);
    bool isNew;
    ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();
    ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
    CEENode->addPredecessor(*I, G);
    if (!isNew)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList
    NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
    SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
                                                      &Ctx);
    SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());

    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostCall;
    getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
                                               *UpdatedCall, *this,
                                               /*WasInlined=*/true);

    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*WasInlined=*/true);
    } else if (CE) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*WasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
    for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
                                   PSI != PSE; ++PSI) {
      Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
                                    calleeCtx->getIndex()+1);
    }
  }
}
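
/// Walk the stack frames leading up to a call to compute the current inlining
/// depth and to detect (possibly mutual) recursion into the given declaration.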
void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
                                    bool &IsRecursive, unsigned &StackDepth) {
  IsRecursive = false;
  StackDepth = 0;

  while (LCtx) {
    if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
      const Decl *DI = SFC->getDecl();

      // Mark recursive (and mutually recursive) functions and always count
      // them when measuring the stack depth.
      if (DI == D) {
        IsRecursive = true;
        ++StackDepth;
        LCtx = LCtx->getParent();
        continue;
      }

      // Do not count the small functions when determining the stack depth.
      AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
      const CFG *CalleeCFG = CalleeADC->getCFG();
      if (CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
        ++StackDepth;
    }
    LCtx = LCtx->getParent();
  }
}
static bool IsInStdNamespace(const FunctionDecl *FD) {
  const DeclContext *DC = FD->getEnclosingNamespaceContext();
  const NamespaceDecl *ND = dyn_cast<NamespaceDecl>(DC);
  if (!ND)
    return false;

  while (const DeclContext *Parent = ND->getParent()) {
    if (!isa<NamespaceDecl>(Parent))
      break;
    ND = cast<NamespaceDecl>(Parent);
  }

  return ND->getName() == "std";
}
// Determine if we should inline the call.
bool ExprEngine::shouldInlineDecl(const Decl *D, ExplodedNode *Pred) {
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const CFG *CalleeCFG = CalleeADC->getCFG();

  // It is possible that the CFG cannot be constructed.
  // Be safe, and check if the CalleeCFG is valid.
  if (!CalleeCFG)
    return false;

  bool IsRecursive = false;
  unsigned StackDepth = 0;
  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
  if ((StackDepth >= AMgr.options.InlineMaxStackDepth) &&
      ((CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
        || IsRecursive))
    return false;

  if (Engine.FunctionSummaries->hasReachedMaxBlockCount(D))
    return false;

  if (CalleeCFG->getNumBlockIDs() > AMgr.options.InlineMaxFunctionSize)
    return false;

  // Do not inline variadic calls (for now).
  if (const BlockDecl *BD = dyn_cast<BlockDecl>(D)) {
    if (BD->isVariadic())
      return false;
  } else if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    if (FD->isVariadic())
      return false;
  }

  if (getContext().getLangOpts().CPlusPlus) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Conditionally allow the inlining of template functions.
      if (!getAnalysisManager().options.mayInlineTemplateFunctions())
        if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
          return false;

      // Conditionally allow the inlining of C++ standard library functions.
      if (!getAnalysisManager().options.mayInlineCXXStandardLibrary())
        if (getContext().getSourceManager().isInSystemHeader(FD->getLocation()))
          if (IsInStdNamespace(FD))
            return false;
    }
  }

  // It is possible that the live variables analysis cannot be
  // run. If so, bail out.
  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
    return false;

  return true;
}
// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is the map from the receiver region to a mode flag, specifying whether
// we consider this region's dispatch information precise along the given path.
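//
// For example, when a message is sent to a receiver whose dynamic type is only
// partially known, one path records DynamicDispatchModeInlined for the
// receiver's region and inlines the candidate definition, while the sibling
// path records DynamicDispatchModeConservative and evaluates the call
// conservatively; see ExprEngine::BifurcateCall below.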
namespace {
  enum DynamicDispatchMode {
    DynamicDispatchModeInlined = 1,
    DynamicDispatchModeConservative
  };
}

REGISTER_TRAIT_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                                 CLANG_ENTO_PROGRAMSTATE_MAP(const MemRegion *,
                                                             unsigned))
bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
                            NodeBuilder &Bldr, ExplodedNode *Pred,
                            ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
  const LocationContext *ParentOfCallee = 0;

  AnalyzerOptions &Opts = getAnalysisManager().options;

  // FIXME: Refactor this check into a hypothetical CallEvent::canInline.
  switch (Call.getKind()) {
  case CE_Function:
    break;
  case CE_CXXMember:
  case CE_CXXMemberOperator:
    if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
      return false;
    break;
  case CE_CXXConstructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
      return false;

    const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);

    // FIXME: We don't handle constructors or destructors for arrays properly.
    const MemRegion *Target = Ctor.getCXXThisVal().getAsRegion();
    if (Target && isa<ElementRegion>(Target))
      return false;

    // FIXME: This is a hack. We don't use the correct region for a new
    // expression, so if we inline the constructor its result will just be
    // thrown away. This short-term hack is tracked in <rdar://problem/12180598>
    // and the longer-term possible fix is discussed in PR12014.
    const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
    if (const Stmt *Parent = CurLC->getParentMap().getParent(CtorExpr))
      if (isa<CXXNewExpr>(Parent))
        return false;

    // Inlining constructors requires including initializers in the CFG.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
    (void)ADC;

    // If the destructor is trivial, it's always safe to inline the constructor.
    if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
      break;

    // For other types, only inline constructors if destructor inlining is
    // also enabled.
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return false;

    // FIXME: This is a hack. We don't handle temporary destructors
    // right now, so we shouldn't inline their constructors.
    if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete)
      if (!Target || !isa<DeclRegion>(Target))
        return false;

    break;
  }
  case CE_CXXDestructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return false;

    // Inlining destructors requires building the CFG correctly.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
    (void)ADC;

    const CXXDestructorCall &Dtor = cast<CXXDestructorCall>(Call);

    // FIXME: We don't handle constructors or destructors for arrays properly.
    const MemRegion *Target = Dtor.getCXXThisVal().getAsRegion();
    if (Target && isa<ElementRegion>(Target))
      return false;

    break;
  }
  case CE_CXXAllocator:
    // Do not inline allocators until we model deallocators.
    // This is unfortunate, but basically necessary for smart pointers and such.
    return false;
  case CE_Block: {
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
                                                         cast<BlockDecl>(D),
                                                         BR);
    break;
  }
  case CE_ObjCMessage:
    if (!Opts.mayInlineObjCMethod())
      return false;
    if (!(getAnalysisManager().options.IPAMode == DynamicDispatch ||
          getAnalysisManager().options.IPAMode == DynamicDispatchBifurcate))
      return false;
    break;
  }

  if (!shouldInlineDecl(D, Pred))
    return false;

  if (!ParentOfCallee)
    ParentOfCallee = CallerSFC;

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
    CalleeADC->getStackFrame(ParentOfCallee, CallE,
                             currBldrCtx->getBlock(),
                             currStmtIdx);

  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    if (isNew)
      Engine.getWorkList()->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;

  // Mark the decl as visited.
  if (VisitedCallees)
    VisitedCallees->insert(D);

  return true;
}
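
/// If the state carries a ReplayWithoutInlining marker for this call, the
/// engine has already backtracked over an inlined evaluation of it; consume
/// the marker and return the cleaned state so the call is evaluated
/// conservatively this time. Returns null otherwise.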
static ProgramStateRef getInlineFailedState(ProgramStateRef State,
                                            const Stmt *CallE) {
  void *ReplayState = State->get<ReplayWithoutInlining>();
  if (!ReplayState)
    return 0;

  assert(ReplayState == (const void*)CallE && "Backtracked to the wrong call.");
  (void)CallE;

  return State->remove<ReplayWithoutInlining>();
}
void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
                               ExplodedNodeSet &dst) {
  // Perform the previsit of the CallExpr.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);

  // Get the call in its initial state. We use this as a template to perform
  // all the checks.
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> CallTemplate
    = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());

  // Evaluate the function call. We try each of the checkers
  // to see if they can evaluate the function call.
  ExplodedNodeSet dstCallEvaluated;
  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
       I != E; ++I) {
    evalCall(dstCallEvaluated, *I, *CallTemplate);
  }

  // Finally, perform the post-condition check of the CallExpr and store
  // the created nodes in 'Dst'.
  // Note that if the call was inlined, dstCallEvaluated will be empty.
  // The post-CallExpr check will occur in processCallExit.
  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
                                             *this);
}
void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
                          const CallEvent &Call) {
  // WARNING: At this time, the state attached to 'Call' may be older than the
  // state in 'Pred'. This is a minor optimization since CheckerManager will
  // use an updated CallEvent instance when calling checkers, but if 'Call' is
  // ever used directly in this function all callers should be updated to pass
  // the most recent state. (It is probably not worth doing the work here since
  // for some callers this will not be necessary.)

  // Run any pre-call checks using the generic call interface.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, Call, *this);

  // Actually evaluate the function call. We try each of the checkers
  // to see if they can evaluate the function call, and get a callback at
  // defaultEvalCall if all of them fail.
  ExplodedNodeSet dstCallEvaluated;
  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
                                             Call, *this);

  // Finally, run any post-call checks.
  getCheckerManager().runCheckersForPostCall(Dst, dstCallEvaluated,
                                             Call, *this);
}
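
/// Bind a value for the call expression in the given context: Objective-C
/// method families that return their receiver and C++ constructor calls get
/// the receiver / 'this' value, and everything else gets a freshly conjured
/// symbol standing in for the unknown return value.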
ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
                                            const LocationContext *LCtx,
                                            ProgramStateRef State) {
  const Expr *E = Call.getOriginExpr();
  if (!E)
    return State;

  // Some method families have known return values.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    switch (Msg->getMethodFamily()) {
    default:
      break;
    case OMF_autorelease:
    case OMF_retain:
    case OMF_self: {
      // These methods return their receivers.
      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
    }
    }
  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
    return State->BindExpr(E, LCtx, C->getCXXThisVal());
  }

  // Conjure a symbol if the return value is unknown.
  QualType ResultTy = Call.getResultType();
  SValBuilder &SVB = getSValBuilder();
  unsigned Count = currBldrCtx->blockCount();
  SVal R = SVB.conjureSymbolVal(0, E, LCtx, ResultTy, Count);
  return State->BindExpr(E, LCtx, R);
}
// Conservatively evaluate call by invalidating regions and binding
// a conjured return value.
void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
                                      ExplodedNode *Pred,
                                      ProgramStateRef State) {
  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
  State = bindReturnValue(Call, Pred->getLocationContext(), State);

  // And make the result node.
  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
}
void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
                                 const CallEvent &CallTemplate) {
  // Make sure we have the most recent state attached to the call.
  ProgramStateRef State = Pred->getState();
  CallEventRef<> Call = CallTemplate.cloneWithState(State);

  if (!getAnalysisManager().shouldInlineCall()) {
    conservativeEvalCall(*Call, Bldr, Pred, State);
    return;
  }
  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // this should still be safe even for CallEvents that don't come from exprs.
  const Expr *E = Call->getOriginExpr();
  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);

  if (InlinedFailedState) {
    // If we already tried once and failed, make sure we don't retry later.
    State = InlinedFailedState;
  } else {
    RuntimeDefinition RD = Call->getRuntimeDefinition();
    const Decl *D = RD.getDecl();
    if (D) {
      if (RD.mayHaveOtherDefinitions()) {
        // Explore with and without inlining the call.
        if (getAnalysisManager().options.IPAMode == DynamicDispatchBifurcate) {
          BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
          return;
        }

        // Don't inline if we're not in any dynamic dispatch mode.
        if (getAnalysisManager().options.IPAMode != DynamicDispatch) {
          conservativeEvalCall(*Call, Bldr, Pred, State);
          return;
        }
      }

      // We are not bifurcating and we do have a Decl, so just inline.
      if (inlineCall(*Call, D, Bldr, Pred, State))
        return;
    }
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  conservativeEvalCall(*Call, Bldr, Pred, State);
}
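
/// Split the path for a dynamically dispatched call whose receiver's type
/// information is imprecise: one successor assumes the runtime definition is
/// correct and tries to inline it, the other evaluates the call
/// conservatively. The decision is recorded per receiver region, so each
/// region causes at most one split.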
void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
                               const CallEvent &Call, const Decl *D,
                               NodeBuilder &Bldr, ExplodedNode *Pred) {
  assert(BifurReg);
  BifurReg = BifurReg->StripCasts();

  // Check if we've performed the split already - note, we only want
  // to split the path once per memory region.
  ProgramStateRef State = Pred->getState();
  const unsigned *BState =
                        State->get<DynamicDispatchBifurcationMap>(BifurReg);
  if (BState) {
    // If we are on the "inline path", keep inlining if possible.
    if (*BState == DynamicDispatchModeInlined)
      if (inlineCall(Call, D, Bldr, Pred, State))
        return;
    // If inlining failed, or we are on the path where we assume we
    // don't have enough info about the receiver to inline, conjure the
    // return value and invalidate the regions.
    conservativeEvalCall(Call, Bldr, Pred, State);
    return;
  }

  // If we got here, this is the first time we process a message to this
  // region, so split the path.
  ProgramStateRef IState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                                DynamicDispatchModeInlined);
  inlineCall(Call, D, Bldr, Pred, IState);

  ProgramStateRef NoIState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                                DynamicDispatchModeConservative);
  conservativeEvalCall(Call, Bldr, Pred, NoIState);

  NumOfDynamicDispatchPathSplits++;
  return;
}
void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {

  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);

  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);

  if (RS->getRetValue()) {
    for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
                                   ei = dstPreVisit.end(); it != ei; ++it) {
      B.generateNode(RS, *it, (*it)->getState());
    }
  }
}