//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//
14 #include "clang/AST/ASTContext.h"
15 #include "clang/AST/Attr.h"
16 #include "clang/AST/Decl.h"
17 #include "clang/Analysis/Analyses/PostOrderCFGView.h"
18 #include "clang/Analysis/Analyses/UninitializedValues.h"
19 #include "clang/Analysis/AnalysisContext.h"
20 #include "clang/Analysis/CFG.h"
21 #include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
22 #include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
23 #include "llvm/ADT/DenseMap.h"
24 #include "llvm/ADT/Optional.h"
25 #include "llvm/ADT/PackedVector.h"
26 #include "llvm/ADT/SmallBitVector.h"
27 #include "llvm/ADT/SmallVector.h"
28 #include "llvm/Support/SaveAndRestore.h"
31 using namespace clang;
33 #define DEBUG_LOGGING 0
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}
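
// For example, given:
//
//   void f() {
//     int x;            // tracked: local scalar declared in f's context
//     static int s;     // not tracked: has static (global) storage
//     std::string str;  // not tracked: class type, neither scalar nor vector
//   }
//
// only 'x' participates in the analysis.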
//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
} // end anonymous namespace
void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}
Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return None;
  return I->second;
}
//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
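
// For example, if a variable is Initialized (01) on one incoming edge and
// Uninitialized (10) on another, OR-ing the two edge values gives
// 01 | 10 == 11, i.e. MayUninitialized, which is exactly the conservative
// join we want at a merge point.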
static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}

static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {
typedef llvm::PackedVector<Value, 2, llvm::SmallBitVector> ValueVector;

class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;
public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};
} // end anonymous namespace
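
// Note that each block's ValueVector packs one 2-bit Value per tracked
// variable, so the dataflow state for a CFG with N blocks and M tracked
// variables costs roughly 2*N*M bits, plus one scratch vector that is
// reused while processing each block.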
CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}
void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (unsigned i = 0; i < n; ++i)
    vals[i].resize(decls);
}
#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif
void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}
void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}
bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}
void CFGBlockValues::resetScratch() {
  scratch.reset();
}
ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}
//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
//====------------------------------------------------------------------------//

namespace {
class DataflowWorklist {
  PostOrderCFGView::iterator PO_I, PO_E;
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg, PostOrderCFGView &view)
    : PO_I(view.begin()), PO_E(view.end()),
      enqueuedBlocks(cfg.getNumBlockIDs(), true) {
    // Treat the first block as already analyzed.
    if (PO_I != PO_E) {
      assert(*PO_I == &cfg.getEntry());
      enqueuedBlocks[(*PO_I)->getBlockID()] = false;
      ++PO_I;
    }
  }

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
} // end anonymous namespace
void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
}
const CFGBlock *DataflowWorklist::dequeue() {
  const CFGBlock *B = 0;

  // First dequeue from the worklist. This can represent
  // updates along backedges that we want propagated as quickly as possible.
  if (!worklist.empty()) {
    B = worklist.back();
    worklist.pop_back();
  }
  // Next dequeue from the initial reverse post order. This is the
  // theoretical ideal in the presence of no back edges.
  else if (PO_I != PO_E) {
    B = *PO_I;
    ++PO_I;
  }
  else {
    return 0;
  }

  assert(enqueuedBlocks[B->getBlockID()] == true);
  enqueuedBlocks[B->getBlockID()] = false;
  return B;
}
//------------------------------------------------------------------------====//
// Classification of DeclRefExprs as use or initialization.
//====------------------------------------------------------------------------//

namespace {
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};
} // end anonymous namespace
static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}
/// If E is an expression comprising a reference to a single variable, find that
/// variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const DeclRefExpr *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(0, 0);
}
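
// For example, for 'int x; ... (x);' findVar on the parenthesized expression
// returns the DeclRefExpr for 'x', while for a member access such as 's.x' it
// returns a null FindVarResult, since that is not a lone variable reference.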
namespace {
/// \brief Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    Ignore
  };

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr*, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);

  void operator()(Stmt *S) { Visit(S); }

  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};
} // end anonymous namespace
static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (Expr *Init = VD->getInit()) {
    const DeclRefExpr *DRE
      = dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return 0;
}
void ClassifyRefs::classify(const Expr *E, Class C) {
  // The result of a ?: could also be an lvalue.
  E = E->IgnoreParens();
  if (const ConditionalOperator *CO = dyn_cast<ConditionalOperator>(E)) {
    const Expr *TrueExpr = CO->getTrueExpr();
    if (!isa<OpaqueValueExpr>(TrueExpr))
      classify(TrueExpr, C);
    classify(CO->getFalseExpr(), C);
    return;
  }

  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}
void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}
void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment does not affect
  // whether a variable is uninitialized, and there's no point counting it as a
  // use.
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign)
    classify(BO->getLHS(), Ignore);
}
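
// For example, in 'x = 1' the LHS mention of 'x' is classified Ignore (the
// assignment itself will mark 'x' initialized when TransferFunctions visits
// it), whereas in 'x += 1' the LHS is classified Use, because the old value
// of 'x' is read.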
void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}
void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // If a value is passed by const reference to a function, we should not assume
  // that it is initialized by the call, and we conservatively do not assume
  // that it is used.
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I)
    if ((*I)->getType().isConstQualified() && (*I)->isGLValue())
      classify(*I, Ignore);
}
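
// For example, given 'void g(const int &);', the argument in 'g(x)' is
// classified Ignore: the call is not assumed to initialize 'x', nor is it
// counted as a read of 'x'.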
void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (CStyleCastExpr *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}
//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
//====------------------------------------------------------------------------//

namespace {
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler &handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
    : vals(vals), cfg(cfg), block(block), ac(ac),
      classification(classification), objCNoRet(ac.getASTContext()),
      handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);
  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }
  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    SmallVector<const CFGBlock*, 32> Queue;
    SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.back();
      Queue.pop_back();
      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (vals.getValue(Pred, B, vd) == Initialized)
          // This block initializes the variable.
          continue;

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }
    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};
} // end anonymous namespace
void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isUninitialized(v))
    handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}
void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (DeclStmt *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}
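
// For example, in 'for (NSString *s in strings) { ... }' the fast-enumeration
// loop assigns to 's' before each execution of the body, so 's' is treated
// as initialized within the loop.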
void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end() ; i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}
void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  if (Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // After a call to a function like setjmp or vfork, any variable which is
      // initialized anywhere within this function may now be initialized. For
      // now, just assume such a call initializes all variables. FIXME: Only
      // mark variables as initialized if they have an initializer which is
      // reachable from here.
      vals.setAllScratchValues(Initialized);
    }
    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled like "analyzer_noreturn" are often used to denote
      // "panic" functions that in special debug situations can still return,
      // but for the most part should not be treated as returning. This
      // annotation, borrowed from the static analyzer, is useful for
      // suppressing branch-specific false positives when we call one of these
      // functions but keep pretending the path continues (when in reality the
      // user doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}
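
// For example, 'setjmp' can "return twice": a later 'longjmp' re-enters
// through the call after variables may have been initialized on the
// intervening path, which is why the conservative choice above is to treat
// everything as Initialized.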
void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  }
}
void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}
void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}
void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}
//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler &handler) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (Optional<CFGStmt> cs = I->getAs<CFGStmt>())
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
  }
  return vals.updateValueVectorWithScratch(block);
}
/// PruneBlocksHandler is a special UninitVariablesHandler that is used
/// to detect when a CFGBlock has any *potential* use of an uninitialized
/// variable. It is mainly used to prune out work during the final
/// reporting pass.
namespace {
struct PruneBlocksHandler : public UninitVariablesHandler {
  PruneBlocksHandler(unsigned numBlocks)
    : hadUse(numBlocks, false), hadAnyUse(false),
      currentBlock(0) {}

  virtual ~PruneBlocksHandler() {}

  /// Records if a CFGBlock had a potential use of an uninitialized variable.
  llvm::BitVector hadUse;

  /// Records if any CFGBlock had a potential use of an uninitialized variable.
  bool hadAnyUse;

  /// The current block to scribble use information.
  unsigned currentBlock;

  virtual void handleUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  /// Called when the uninitialized variable analysis detects the
  /// idiom 'int x = x'. All other uses of 'x' within the initializer
  /// are handled by handleUseOfUninitVariable.
  virtual void handleSelfInit(const VarDecl *vd) {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }
};
} // end anonymous namespace
void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n ; ++j) {
    vec[j] = Uninitialized;
  }
  // Proceed with the worklist.
  DataflowWorklist worklist(cfg, *ac.getAnalysis<PostOrderCFGView>());
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  PruneBlocksHandler PBH(cfg.getNumBlockIDs());

  while (const CFGBlock *block = worklist.dequeue()) {
    PBH.currentBlock = block->getBlockID();

    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  if (!PBH.hadAnyUse)
    return;
  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (PBH.hadUse[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
      ++stats.NumBlockVisits;
    }
  }
}
UninitVariablesHandler::~UninitVariablesHandler() {}