//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines ExprEngine's support for calls and returns.
//
//===----------------------------------------------------------------------===//
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ObjCMessage.h"
#include "clang/AST/DeclCXX.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace ento;
24 void ExprEngine::processCallEnter(CallEnter CE, ExplodedNode *Pred) {
25 // Get the entry block in the CFG of the callee.
26 const StackFrameContext *calleeCtx = CE.getCalleeContext();
27 const CFG *CalleeCFG = calleeCtx->getCFG();
28 const CFGBlock *Entry = &(CalleeCFG->getEntry());
31 assert(Entry->empty());
32 assert(Entry->succ_size() == 1);
34 // Get the solitary sucessor.
35 const CFGBlock *Succ = *(Entry->succ_begin());
37 // Construct an edge representing the starting location in the callee.
38 BlockEdge Loc(Entry, Succ, calleeCtx);
40 // Construct a new state which contains the mapping from actual to
42 const LocationContext *callerCtx = Pred->getLocationContext();
43 ProgramStateRef state = Pred->getState()->enterStackFrame(callerCtx,
46 // Construct a new node and add it to the worklist.
48 ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
49 Node->addPredecessor(Pred, G);
51 Engine.getWorkList()->enqueue(Node);
54 static const ReturnStmt *getReturnStmt(const ExplodedNode *Node) {
56 const ProgramPoint &PP = Node->getLocation();
57 // Skip any BlockEdges.
58 if (isa<BlockEdge>(PP) || isa<CallExit>(PP)) {
59 assert(Node->pred_size() == 1);
60 Node = *Node->pred_begin();
63 if (const StmtPoint *SP = dyn_cast<StmtPoint>(&PP)) {
64 const Stmt *S = SP->getStmt();
65 return dyn_cast<ReturnStmt>(S);
72 void ExprEngine::processCallExit(ExplodedNode *Pred) {
73 ProgramStateRef state = Pred->getState();
74 const StackFrameContext *calleeCtx =
75 Pred->getLocationContext()->getCurrentStackFrame();
76 const LocationContext *callerCtx = calleeCtx->getParent();
77 const Stmt *CE = calleeCtx->getCallSite();
79 // If the callee returns an expression, bind its value to CallExpr.
80 if (const ReturnStmt *RS = getReturnStmt(Pred)) {
81 const LocationContext *LCtx = Pred->getLocationContext();
82 SVal V = state->getSVal(RS, LCtx);
83 state = state->BindExpr(CE, callerCtx, V);
86 // Bind the constructed object value to CXXConstructExpr.
87 if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
88 const CXXThisRegion *ThisR =
89 getCXXThisRegion(CCE->getConstructor()->getParent(), calleeCtx);
91 SVal ThisV = state->getSVal(ThisR);
92 // Always bind the region to the CXXConstructExpr.
93 state = state->BindExpr(CCE, Pred->getLocationContext(), ThisV);
96 static SimpleProgramPointTag returnTag("ExprEngine : Call Return");
97 PostStmt Loc(CE, callerCtx, &returnTag);
99 ExplodedNode *N = G.getNode(Loc, state, false, &isNew);
100 N->addPredecessor(Pred, G);
104 // Perform the post-condition check of the CallExpr.
106 NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), N);
107 SaveAndRestore<const NodeBuilderContext*> NBCSave(currentBuilderContext,
109 SaveAndRestore<unsigned> CBISave(currentStmtIdx, calleeCtx->getIndex());
111 getCheckerManager().runCheckersForPostStmt(Dst, N, CE, *this,
112 /* wasInlined */ true);
114 // Enqueue the next element in the block.
115 for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end(); I != E; ++I) {
116 Engine.getWorkList()->enqueue(*I,
117 calleeCtx->getCallSiteBlock(),
118 calleeCtx->getIndex()+1);
122 static unsigned getNumberStackFrames(const LocationContext *LCtx) {
125 if (isa<StackFrameContext>(LCtx))
127 LCtx = LCtx->getParent();
132 // Determine if we should inline the call.
133 bool ExprEngine::shouldInlineDecl(const FunctionDecl *FD, ExplodedNode *Pred) {
134 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(FD);
135 const CFG *CalleeCFG = CalleeADC->getCFG();
137 // It is possible that the CFG cannot be constructed.
138 // Be safe, and check if the CalleeCFG is valid.
142 if (getNumberStackFrames(Pred->getLocationContext())
143 == AMgr.InlineMaxStackDepth)
146 if (Engine.FunctionSummaries->hasReachedMaxBlockCount(FD))
149 if (CalleeCFG->getNumBlockIDs() > AMgr.InlineMaxFunctionSize)
155 // For now, skip inlining variadic functions.
156 // We also don't inline blocks.
157 static bool shouldInlineCallExpr(const CallExpr *CE, ExprEngine *E) {
158 if (!E->getAnalysisManager().shouldInlineCall())
160 QualType callee = CE->getCallee()->getType();
161 const FunctionProtoType *FT = 0;
162 if (const PointerType *PT = callee->getAs<PointerType>())
163 FT = dyn_cast<FunctionProtoType>(PT->getPointeeType());
164 else if (const BlockPointerType *BT = callee->getAs<BlockPointerType>()) {
165 // FIXME: inline blocks.
166 // FT = dyn_cast<FunctionProtoType>(BT->getPointeeType());
170 // If we have no prototype, assume the function is okay.
174 // Skip inlining of variadic functions.
175 return !FT->isVariadic();
178 bool ExprEngine::InlineCall(ExplodedNodeSet &Dst,
180 ExplodedNode *Pred) {
181 if (!shouldInlineCallExpr(CE, this))
184 ProgramStateRef state = Pred->getState();
185 const Expr *Callee = CE->getCallee();
186 const FunctionDecl *FD =
187 state->getSVal(Callee, Pred->getLocationContext()).getAsFunctionDecl();
188 if (!FD || !FD->hasBody(FD))
191 switch (CE->getStmtClass()) {
193 // FIXME: Handle C++.
195 case Stmt::CallExprClass: {
196 if (!shouldInlineDecl(FD, Pred))
199 // Construct a new stack frame for the callee.
200 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(FD);
201 const StackFrameContext *CallerSFC =
202 Pred->getLocationContext()->getCurrentStackFrame();
203 const StackFrameContext *CalleeSFC =
204 CalleeADC->getStackFrame(CallerSFC, CE,
205 currentBuilderContext->getBlock(),
208 CallEnter Loc(CE, CalleeSFC, Pred->getLocationContext());
210 if (ExplodedNode *N = G.getNode(Loc, state, false, &isNew)) {
211 N->addPredecessor(Pred, G);
213 Engine.getWorkList()->enqueue(N);
221 static bool isPointerToConst(const ParmVarDecl *ParamDecl) {
222 QualType PointeeTy = ParamDecl->getOriginalType()->getPointeeType();
223 if (PointeeTy != QualType() && PointeeTy.isConstQualified() &&
224 !PointeeTy->isAnyPointerType() && !PointeeTy->isReferenceType()) {
230 // Try to retrieve the function declaration and find the function parameter
231 // types which are pointers/references to a non-pointer const.
232 // We do not invalidate the corresponding argument regions.
233 static void findPtrToConstParams(llvm::SmallSet<unsigned, 1> &PreserveArgs,
234 const CallOrObjCMessage &Call) {
235 const Decl *CallDecl = Call.getDecl();
239 if (const FunctionDecl *FDecl = dyn_cast<FunctionDecl>(CallDecl)) {
240 const IdentifierInfo *II = FDecl->getIdentifier();
242 // List the cases, where the region should be invalidated even if the
243 // argument is const.
245 StringRef FName = II->getName();
246 // - 'int pthread_setspecific(ptheread_key k, const void *)' stores a
247 // value into thread local storage. The value can later be retrieved with
248 // 'void *ptheread_getspecific(pthread_key)'. So even thought the
249 // parameter is 'const void *', the region escapes through the call.
250 // - funopen - sets a buffer for future IO calls.
251 // - ObjC functions that end with "NoCopy" can free memory, of the passed
253 // - Many CF containers allow objects to escape through custom
254 // allocators/deallocators upon container construction.
255 // - NSXXInsertXX, for example NSMapInsertIfAbsent, since they can
256 // be deallocated by NSMapRemove.
257 if (FName == "pthread_setspecific" ||
258 FName == "funopen" ||
259 FName.endswith("NoCopy") ||
260 (FName.startswith("NS") &&
261 (FName.find("Insert") != StringRef::npos)) ||
262 Call.isCFCGAllowingEscape(FName))
266 for (unsigned Idx = 0, E = Call.getNumArgs(); Idx != E; ++Idx) {
267 if (FDecl && Idx < FDecl->getNumParams()) {
268 if (isPointerToConst(FDecl->getParamDecl(Idx)))
269 PreserveArgs.insert(Idx);
275 if (const ObjCMethodDecl *MDecl = dyn_cast<ObjCMethodDecl>(CallDecl)) {
276 assert(MDecl->param_size() <= Call.getNumArgs());
278 for (clang::ObjCMethodDecl::param_const_iterator
279 I = MDecl->param_begin(), E = MDecl->param_end(); I != E; ++I, ++Idx) {
280 if (isPointerToConst(*I))
281 PreserveArgs.insert(Idx);
288 ExprEngine::invalidateArguments(ProgramStateRef State,
289 const CallOrObjCMessage &Call,
290 const LocationContext *LC) {
291 SmallVector<const MemRegion *, 8> RegionsToInvalidate;
293 if (Call.isObjCMessage()) {
294 // Invalidate all instance variables of the receiver of an ObjC message.
295 // FIXME: We should be able to do better with inter-procedural analysis.
296 if (const MemRegion *MR = Call.getInstanceMessageReceiver(LC).getAsRegion())
297 RegionsToInvalidate.push_back(MR);
299 } else if (Call.isCXXCall()) {
300 // Invalidate all instance variables for the callee of a C++ method call.
301 // FIXME: We should be able to do better with inter-procedural analysis.
302 // FIXME: We can probably do better for const versus non-const methods.
303 if (const MemRegion *Callee = Call.getCXXCallee().getAsRegion())
304 RegionsToInvalidate.push_back(Callee);
306 } else if (Call.isFunctionCall()) {
307 // Block calls invalidate all captured-by-reference values.
308 SVal CalleeVal = Call.getFunctionCallee();
309 if (const MemRegion *Callee = CalleeVal.getAsRegion()) {
310 if (isa<BlockDataRegion>(Callee))
311 RegionsToInvalidate.push_back(Callee);
315 // Indexes of arguments whose values will be preserved by the call.
316 llvm::SmallSet<unsigned, 1> PreserveArgs;
317 findPtrToConstParams(PreserveArgs, Call);
319 for (unsigned idx = 0, e = Call.getNumArgs(); idx != e; ++idx) {
320 if (PreserveArgs.count(idx))
323 SVal V = Call.getArgSVal(idx);
325 // If we are passing a location wrapped as an integer, unwrap it and
326 // invalidate the values referred by the location.
327 if (nonloc::LocAsInteger *Wrapped = dyn_cast<nonloc::LocAsInteger>(&V))
328 V = Wrapped->getLoc();
329 else if (!isa<Loc>(V))
332 if (const MemRegion *R = V.getAsRegion()) {
333 // Invalidate the value of the variable passed by reference.
335 // Are we dealing with an ElementRegion? If the element type is
336 // a basic integer type (e.g., char, int) and the underlying region
337 // is a variable region then strip off the ElementRegion.
338 // FIXME: We really need to think about this for the general case
339 // as sometimes we are reasoning about arrays and other times
340 // about (char*), etc., is just a form of passing raw bytes.
341 // e.g., void *p = alloca(); foo((char*)p);
342 if (const ElementRegion *ER = dyn_cast<ElementRegion>(R)) {
343 // Checking for 'integral type' is probably too promiscuous, but
344 // we'll leave it in for now until we have a systematic way of
345 // handling all of these cases. Eventually we need to come up
346 // with an interface to StoreManager so that this logic can be
347 // appropriately delegated to the respective StoreManagers while
348 // still allowing us to do checker-specific logic (e.g.,
349 // invalidating reference counts), probably via callbacks.
350 if (ER->getElementType()->isIntegralOrEnumerationType()) {
351 const MemRegion *superReg = ER->getSuperRegion();
352 if (isa<VarRegion>(superReg) || isa<FieldRegion>(superReg) ||
353 isa<ObjCIvarRegion>(superReg))
354 R = cast<TypedRegion>(superReg);
356 // FIXME: What about layers of ElementRegions?
359 // Mark this region for invalidation. We batch invalidate regions
360 // below for efficiency.
361 RegionsToInvalidate.push_back(R);
363 // Nuke all other arguments passed by reference.
364 // FIXME: is this necessary or correct? This handles the non-Region
365 // cases. Is it ever valid to store to these?
366 State = State->unbindLoc(cast<Loc>(V));
370 // Invalidate designated regions using the batch invalidation API.
372 // FIXME: We can have collisions on the conjured symbol if the
373 // expression *I also creates conjured symbols. We probably want
374 // to identify conjured symbols by an expression pair: the enclosing
375 // expression (the context) and the expression itself. This should
376 // disambiguate conjured symbols.
377 unsigned Count = currentBuilderContext->getCurrentBlockCount();
378 StoreManager::InvalidatedSymbols IS;
380 // NOTE: Even if RegionsToInvalidate is empty, we may still invalidate
382 return State->invalidateRegions(RegionsToInvalidate,
383 Call.getOriginExpr(), Count, LC,
388 static ProgramStateRef getReplayWithoutInliningState(ExplodedNode *&N,
389 const CallExpr *CE) {
390 void *ReplayState = N->getState()->get<ReplayWithoutInlining>();
393 const CallExpr *ReplayCE = reinterpret_cast<const CallExpr*>(ReplayState);
394 if (CE == ReplayCE) {
395 return N->getState()->remove<ReplayWithoutInlining>();
400 void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
401 ExplodedNodeSet &dst) {
402 // Perform the previsit of the CallExpr.
403 ExplodedNodeSet dstPreVisit;
404 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
406 // Now evaluate the call itself.
407 class DefaultEval : public GraphExpander {
412 DefaultEval(ExprEngine &eng, const CallExpr *ce)
413 : Eng(eng), CE(ce) {}
414 virtual void expandGraph(ExplodedNodeSet &Dst, ExplodedNode *Pred) {
416 ProgramStateRef state = getReplayWithoutInliningState(Pred, CE);
418 // First, try to inline the call.
419 if (state == 0 && Eng.InlineCall(Dst, CE, Pred))
422 // First handle the return value.
423 StmtNodeBuilder Bldr(Pred, Dst, *Eng.currentBuilderContext);
426 const Expr *Callee = CE->getCallee()->IgnoreParens();
428 state = Pred->getState();
429 SVal L = state->getSVal(Callee, Pred->getLocationContext());
431 // Figure out the result type. We do this dance to handle references.
433 if (const FunctionDecl *FD = L.getAsFunctionDecl())
434 ResultTy = FD->getResultType();
436 ResultTy = CE->getType();
439 ResultTy = Eng.getContext().getPointerType(ResultTy);
441 // Conjure a symbol value to use as the result.
442 SValBuilder &SVB = Eng.getSValBuilder();
443 unsigned Count = Eng.currentBuilderContext->getCurrentBlockCount();
444 const LocationContext *LCtx = Pred->getLocationContext();
445 SVal RetVal = SVB.getConjuredSymbolVal(0, CE, LCtx, ResultTy, Count);
447 // Generate a new state with the return value set.
448 state = state->BindExpr(CE, LCtx, RetVal);
450 // Invalidate the arguments.
451 state = Eng.invalidateArguments(state, CallOrObjCMessage(CE, state, LCtx),
454 // And make the result node.
455 Bldr.generateNode(CE, Pred, state);
459 // Finally, evaluate the function call. We try each of the checkers
460 // to see if the can evaluate the function call.
461 ExplodedNodeSet dstCallEvaluated;
462 DefaultEval defEval(*this, CE);
463 getCheckerManager().runCheckersForEvalCall(dstCallEvaluated,
465 CE, *this, &defEval);
467 // Finally, perform the post-condition check of the CallExpr and store
468 // the created nodes in 'Dst'.
469 getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
473 void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
474 ExplodedNodeSet &Dst) {
476 ExplodedNodeSet dstPreVisit;
477 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
479 StmtNodeBuilder B(dstPreVisit, Dst, *currentBuilderContext);
481 if (RS->getRetValue()) {
482 for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
483 ei = dstPreVisit.end(); it != ei; ++it) {
484 B.generateNode(RS, *it, (*it)->getState());