//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/Analyses/PostOrderCFGView.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;

#define DEBUG_LOGGING 0

static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}
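
// For example, given 'void f() { int x; static int s; }', only 'x' is
// tracked: 's' has global storage, and only scalar and vector locals
// declared directly in 'dc' qualify.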

//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return None;
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
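
// For example, merging a path on which a variable is Initialized (01) with
// one on which it is Uninitialized (10) yields MayUninitialized (11) under
// bitwise OR.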

static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {

typedef llvm::PackedVector<Value, 2, llvm::SmallBitVector> ValueVector;
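
// Each tracked variable occupies two bits of a ValueVector, so the dataflow
// state of an entire block is a compact bit vector indexed via DeclToIndex.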

class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;
public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};
} // end anonymous namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (unsigned i = 0; i < n; ++i)
    vals[i].resize(decls);
}

#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
//====------------------------------------------------------------------------//

namespace {
class DataflowWorklist {
  PostOrderCFGView::iterator PO_I, PO_E;
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg, PostOrderCFGView &view)
    : PO_I(view.begin()), PO_E(view.end()),
      enqueuedBlocks(cfg.getNumBlockIDs(), true) {
        // Treat the first block as already analyzed.
        if (PO_I != PO_E) {
          assert(*PO_I == &cfg.getEntry());
          enqueuedBlocks[(*PO_I)->getBlockID()] = false;
          ++PO_I;
        }
      }

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}

void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
}

const CFGBlock *DataflowWorklist::dequeue() {
  const CFGBlock *B = 0;

  // First dequeue from the worklist. This can represent
  // updates along backedges that we want propagated as quickly as possible.
  if (!worklist.empty())
    B = worklist.pop_back_val();

  // Next dequeue from the initial reverse post order. This is the
  // theoretical ideal in the presence of no back edges.
  else if (PO_I != PO_E) {
    B = *PO_I;
    ++PO_I;
  }
  else {
    return 0;
  }

  assert(enqueuedBlocks[B->getBlockID()] == true);
  enqueuedBlocks[B->getBlockID()] = false;
  return B;
}
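
// The net effect is a hybrid traversal: blocks invalidated along back edges
// drain LIFO from the worklist first, and the initial reverse post-order
// sequence is consumed only when the worklist is empty.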

//------------------------------------------------------------------------====//
// Classification of DeclRefExprs as use or initialization.
//====------------------------------------------------------------------------//

namespace {
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

/// If E is an expression comprising a reference to a single variable, find that
/// variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const DeclRefExpr *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(0, 0);
}

/// \brief Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    Ignore
  };

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr*, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);

  void operator()(Stmt *S) { Visit(S); }

  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};
}

static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (Expr *Init = VD->getInit()) {
    const DeclRefExpr *DRE
      = dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return 0;
}
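
// For example, 'int x = x;' returns the DeclRefExpr for the 'x' in the
// initializer, while 'int x = y;' returns null.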

void ClassifyRefs::classify(const Expr *E, Class C) {
  // The result of a ?: could also be an lvalue.
  E = E->IgnoreParens();
  if (const ConditionalOperator *CO = dyn_cast<ConditionalOperator>(E)) {
    const Expr *TrueExpr = CO->getTrueExpr();
    if (!isa<OpaqueValueExpr>(TrueExpr))
      classify(TrueExpr, C);
    classify(CO->getFalseExpr(), C);
    return;
  }

  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}

void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment does not affect
  // whether a variable is uninitialized, and there's no point counting it as a
  // use.
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign)
    classify(BO->getLHS(), Ignore);
}
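
// For example, in 'x += 1' the LHS 'x' is classified as a Use (its old value
// is read), while in 'x = 1' it is classified as Ignore.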

void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}

void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // If a value is passed by const reference to a function, we should not assume
  // that it is initialized by the call, and we conservatively do not assume
  // that it is used.
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I)
    if ((*I)->getType().isConstQualified() && (*I)->isGLValue())
      classify(*I, Ignore);
}

void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (CStyleCastExpr *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}

//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
//====------------------------------------------------------------------------//

namespace {
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler &handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
    : vals(vals), cfg(cfg), block(block), ac(ac),
      classification(classification), objCNoRet(ac.getASTContext()),
      handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    SmallVector<const CFGBlock*, 32> Queue;
    SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.pop_back_val();

      // If the use is always reached from the entry block, make a note of that.
      if (B == &cfg.getEntry())
        Use.setUninitAfterCall();

      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        Value AtPredExit = vals.getValue(Pred, B, vd);
        if (AtPredExit == Initialized)
          // This block initializes the variable.
          continue;
        if (AtPredExit == MayUninitialized &&
            vals.getValue(B, 0, vd) == Uninitialized) {
          // This block declares the variable (uninitialized), and is reachable
          // from a block that initializes the variable. We can't guarantee to
          // give an earlier location for the diagnostic (and it appears that
          // this code is intended to be reachable) so give a diagnostic here
          // and go no further down this path.
          Use.setUninitAfterDecl();
          continue;
        }

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};
}

void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isUninitialized(v))
    handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (DeclStmt *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}
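
// For example, in 'for (id elem in collection) { ... }', 'elem' is treated
// as initialized by the fast-enumeration loop itself.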

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end() ; i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}
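
// A __block (by-reference) capture is assumed initialized, since the block
// may write to it; a by-value capture reads the variable when the block
// literal is formed, so it is reported as a use if still uninitialized.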

void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  if (Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // After a call to a function like setjmp or vfork, any variable which is
      // initialized anywhere within this function may now be initialized. For
      // now, just assume such a call initializes all variables. FIXME: Only
      // mark variables as initialized if they have an initializer which is
      // reachable from here.
      vals.setAllScratchValues(Initialized);
    }
    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled like "analyzer_noreturn" are often used to denote
      // "panic" functions that in special debug situations can still return,
      // but for the most part should not be treated as returning. This is a
      // useful annotation borrowed from the static analyzer that is useful for
      // suppressing branch-specific false positives when we call one of these
      // functions but keep pretending the path continues (when in reality the
      // user doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  }
}

void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}

void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}

//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//

static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler &handler) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (Optional<CFGStmt> cs = I->getAs<CFGStmt>())
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
  }
  return vals.updateValueVectorWithScratch(block);
}

/// PruneBlocksHandler is a special UninitVariablesHandler that is used
/// to detect when a CFGBlock has any *potential* use of an uninitialized
/// variable. It is mainly used to prune out work during the final
/// reporting pass.
namespace {
struct PruneBlocksHandler : public UninitVariablesHandler {
  PruneBlocksHandler(unsigned numBlocks)
    : hadUse(numBlocks, false), hadAnyUse(false),
      currentBlock(0) {}

  virtual ~PruneBlocksHandler() {}

  /// Records if a CFGBlock had a potential use of an uninitialized variable.
  llvm::BitVector hadUse;

  /// Records if any CFGBlock had a potential use of an uninitialized variable.
  bool hadAnyUse;

  /// The current block to scribble use information.
  unsigned currentBlock;

  virtual void handleUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  /// Called when the uninitialized variable analysis detects the
  /// idiom 'int x = x'. All other uses of 'x' within the initializer
  /// are handled by handleUseOfUninitVariable.
  virtual void handleSelfInit(const VarDecl *vd) {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }
};
}

void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n ; ++j) {
    vec[j] = Uninitialized;
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg, *ac.getAnalysis<PostOrderCFGView>());
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  PruneBlocksHandler PBH(cfg.getNumBlockIDs());

  while (const CFGBlock *block = worklist.dequeue()) {
    PBH.currentBlock = block->getBlockID();

    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  if (!PBH.hadAnyUse)
    return;
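
  // At this point the dataflow has reached a fixpoint: no block's value
  // vector changed on its most recent visit.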

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (PBH.hadUse[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
      ++stats.NumBlockVisits;
    }
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}