//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/DenseMap.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Decl.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;

#define DEBUG_LOGGING 0
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}
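// For illustration, given the predicate above: in 'void f() { int x; static
// int s; int a[10]; }', only 'x' is tracked -- 's' has global storage, and
// 'a' has neither scalar nor vector type.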
//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//
namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}
void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}
llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return llvm::Optional<unsigned>();
  return I->second;
}
//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//
// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
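// For example, merging a predecessor where a variable is Initialized (01) with
// one where it is Uninitialized (10) gives MayUninitialized: 01 | 10 == 11.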
static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}
namespace {

typedef llvm::PackedVector<Value, 2, llvm::SmallBitVector> ValueVector;
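// Note that each tracked variable occupies two bits in a ValueVector, so a
// block's entire dataflow state for N variables packs into 2*N bits.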
class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;

public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};
} // end anonymous namespace
CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}
void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (unsigned i = 0; i < n; ++i)
    vals[i].resize(decls);
}
#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif
void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}
void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}
bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}
void CFGBlockValues::resetScratch() {
  scratch.reset();
}
ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}
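// Note that operator[] indexes into the scratch vector, not a block's stored
// vector: transfer functions mutate the in-flight scratch state, which is only
// copied back by updateValueVectorWithScratch().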
//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
//====------------------------------------------------------------------------//
namespace {
class DataflowWorklist {
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}
void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  unsigned OldWorklistSize = worklist.size();
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
  if (OldWorklistSize == 0 || OldWorklistSize == worklist.size())
    return;

  // Rotate the newly added blocks to the start of the worklist so that it
  // forms a proper queue when we pop off the end of the worklist.
  std::rotate(worklist.begin(), worklist.begin() + OldWorklistSize,
              worklist.end());
}
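// For example, if the worklist held [A B] and the loop above appended C and D,
// the rotation turns [A B C D] into [C D A B]; popping off the end then yields
// the previously enqueued B and A before the newly added D and C.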
const CFGBlock *DataflowWorklist::dequeue() {
  if (worklist.empty())
    return 0;
  const CFGBlock *b = worklist.back();
  worklist.pop_back();
  enqueuedBlocks[b->getBlockID()] = false;
  return b;
}
//------------------------------------------------------------------------====//
// Classification of DeclRefExprs as use or initialization.
//====------------------------------------------------------------------------//
namespace {
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};
static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}
/// If E is an expression comprising a reference to a single variable, find that
/// variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const DeclRefExpr *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(0, 0);
}
/// \brief Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    Ignore
  };

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr*, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);
public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}
  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);

  void operator()(Stmt *S) { Visit(S); }
  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};
}
static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (Expr *Init = VD->getInit()) {
    const DeclRefExpr *DRE
      = dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return 0;
}
void ClassifyRefs::classify(const Expr *E, Class C) {
  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}
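// Note that the Class enumerators are ordered Init < Use < SelfInit < Ignore,
// so the std::max above keeps the highest classification assigned to a
// DeclRefExpr that is classified more than once.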
void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}
void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment does not affect
  // whether a variable is uninitialized, and there's no point counting it as a
  // use.
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign)
    classify(BO->getLHS(), Ignore);
}
void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}
void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // If a value is passed by const reference to a function, we should not assume
  // that it is initialized by the call, and we conservatively do not assume
  // that it is used.
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I)
    if ((*I)->getType().isConstQualified() && (*I)->isGLValue())
      classify(*I, Ignore);
}
void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (CStyleCastExpr *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}
//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
//====------------------------------------------------------------------------//
namespace {
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler *handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler *handler)
    : vals(vals), cfg(cfg), block(block), ac(ac),
      classification(classification), objCNoRet(ac.getASTContext()),
      handler(handler) {}
  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }
  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;
    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // After visiting block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // After visiting block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    llvm::SmallVector<const CFGBlock*, 32> Queue;
    llvm::SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.back();
      Queue.pop_back();
      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (vals.getValue(Pred, B, vd) == Initialized)
          // This block initializes the variable.
          continue;

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }
    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};
}
void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  if (!handler)
    return;
  Value v = vals[vd];
  if (isUninitialized(v))
    handler->handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}
void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (DeclStmt *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}
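// For example, in 'for (id x in collection) { ... }', the loop writes 'x' on
// each iteration, so the element declaration is treated as an initialization
// rather than left Uninitialized.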
void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end() ; i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}
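// For example, in '^{ return x; }' the by-value capture of 'x' is a use and is
// reported if 'x' may be uninitialized, whereas a __block variable is captured
// by reference and conservatively treated as initialized by the block.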
void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  if (Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // After a call to a function like setjmp or vfork, any variable which is
      // initialized anywhere within this function may now be initialized. For
      // now, just assume such a call initializes all variables. FIXME: Only
      // mark variables as initialized if they have an initializer which is
      // reachable from here.
      vals.setAllScratchValues(Initialized);
    }
    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled like "analyzer_noreturn" are often used to denote
      // "panic" functions that in special debug situations can still return,
      // but for the most part should not be treated as returning. This is a
      // useful annotation borrowed from the static analyzer that is useful for
      // suppressing branch-specific false positives when we call one of these
      // functions but keep pretending the path continues (when in reality the
      // user doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}
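// For example, setjmp() carries the returns_twice attribute: on its second
// return, a variable assigned between setjmp() and longjmp() really is
// initialized even though no path reaching the call appears to initialize it,
// hence the conservative treatment above.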
void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    if (handler)
      handler->handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  }
}
void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}
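// Note that only plain assignment initializes the LHS here. For
// 'int x; x = x + 1;' the RHS load of 'x' is classified as a use and reported
// before this visit marks 'x' as Initialized.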
void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}
void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}
//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
    }
  }
  return vals.updateValueVectorWithScratch(block);
}
void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();
  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);
  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }
  // Proceed with the worklist.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }
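  // Note that a block is re-enqueued only if its computed vector actually
  // changed or it had not been visited before, so the loop stops once every
  // block's values stabilize at a fixed point.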
  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (wasAnalyzed[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, &handler);
      ++stats.NumBlockVisits;
    }
  }
}
UninitVariablesHandler::~UninitVariablesHandler() {}