1 //==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file implements uninitialized values analysis for source-level CFGs.
12 //===----------------------------------------------------------------------===//
15 #include "llvm/ADT/Optional.h"
16 #include "llvm/ADT/SmallVector.h"
17 #include "llvm/ADT/PackedVector.h"
18 #include "llvm/ADT/DenseMap.h"
19 #include "clang/AST/ASTContext.h"
20 #include "clang/AST/Decl.h"
21 #include "clang/Analysis/CFG.h"
22 #include "clang/Analysis/AnalysisContext.h"
23 #include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
24 #include "clang/Analysis/Analyses/UninitializedValues.h"
25 #include "llvm/Support/SaveAndRestore.h"
using namespace clang;

// Compile-time switch for the debug dump helper printVector() below.
#define DEBUG_LOGGING 0
/// Determine whether this analysis tracks \p vd: a non-static local variable,
/// declared directly in \p dc, that is not an exception variable and whose
/// type is scalar or vector.
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
    // NOTE(review): this excerpt omits the close of the 'if' and the presumed
    // 'return false;' fall-through -- confirm against the full file.
41 //------------------------------------------------------------------------====//
42 // DeclToIndex: a mapping from Decls we track to value indices.
43 //====------------------------------------------------------------------------//
  // Interior of class DeclToIndex (its 'class' header is elided in this
  // excerpt): assigns each tracked VarDecl a dense index into value vectors.
  llvm::DenseMap<const VarDecl *, unsigned> map;
  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);
  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }
  /// Returns the bit vector index for a given declaration, or an empty
  /// Optional when the declaration is not tracked (see definition below).
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
/// Populate 'map' with every VarDecl in \p dc accepted by isTrackedVar().
/// NOTE(review): the declaration of iterator 'E' and the loop-body line
/// assigning each decl its index are elided in this excerpt.
void DeclToIndex::computeMap(const DeclContext &dc) {
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
/// Look up the bit-vector index for \p d. Returns an empty Optional when the
/// declaration is not in the map.
llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  // NOTE(review): the 'I == map.end()' guard preceding this return and the
  // successful 'return I->second;' path are elided in this excerpt.
    return llvm::Optional<unsigned>();
80 //------------------------------------------------------------------------====//
81 // CFGBlockValues: dataflow values for CFG blocks.
82 //====------------------------------------------------------------------------//
// These values are defined in such a way that a merge can be done using
// a bitwise OR on the two-bit encodings: Initialized (01) | Uninitialized
// (10) == MayUninitialized (11).
enum Value { Unknown = 0x0, /* 00 */
             Initialized = 0x1, /* 01 */
             Uninitialized = 0x2, /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
// NOTE(review): the closing braces of these two helpers are elided in this
// excerpt.
// True for both Uninitialized (10) and MayUninitialized (11).
static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
// True only for the definitely-uninitialized state.
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
// One two-bit Value per tracked variable, bit-packed.
typedef llvm::PackedVector<Value, 2> ValueVector;
// Dataflow value vectors for all CFG blocks plus a scratch vector used while
// merging predecessors and applying transfer functions. Several members,
// access specifiers, and closing braces are elided in this excerpt.
class CFGBlockValues {
  // One ValueVector per CFG block, indexed by block ID; entries are
  // heap-allocated in computeSetOfDeclarations() and freed in the destructor.
  std::vector<ValueVector*> vals;
  DeclToIndex declToIndex;
  CFGBlockValues(const CFG &cfg);
  unsigned getNumEntries() const { return declToIndex.size(); }
  void computeSetOfDeclarations(const DeclContext &dc);
  // Direct access to a block's stored value vector.
  ValueVector &getValueVector(const CFGBlock *block) {
    return *vals[block->getBlockID()];
  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);
  // True when the DeclContext declares no tracked variables; the driver uses
  // this to skip the analysis entirely.
  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  // Assignable reference into the scratch vector for a tracked variable.
  ValueVector::reference operator[](const VarDecl *vd);
  // Value of 'vd' in 'block' (the 'dstBlock' parameter list is cut short and
  // further lines are elided in this excerpt).
  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
    const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
} // end anonymous namespace
139 CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}
// Free the per-block ValueVectors allocated in computeSetOfDeclarations().
// NOTE(review): the loop body performing the deletes is elided in this
// excerpt.
CFGBlockValues::~CFGBlockValues() {
  for (std::vector<ValueVector*>::iterator I = vals.begin(), E = vals.end();
/// Build the decl->index map for \p dc and size all value vectors to match:
/// the scratch vector plus one heap-allocated vector per CFG block.
void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  // NOTE(review): the line resizing 'vals' to n entries is elided in this
  // excerpt; without it the loop below would index past the end.
  for (unsigned i = 0; i < n; ++i)
    vals[i] = new ValueVector(decls);
// Debug helper: dump a block's value vector to stderr (used with
// DEBUG_LOGGING). NOTE(review): the trailing 'unsigned num' parameter and
// some braces are elided in this excerpt -- 'num' is referenced below.
static void printVector(const CFGBlock *block, ValueVector &bv,
  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  llvm::errs() << " : " << num << '\n';
// Overwrite every scratch entry with \p V. Used by VisitCallExpr for calls
// to returns-twice functions, which conservatively mark everything
// Initialized. NOTE(review): the loop body ('scratch[I] = V;') is elided in
// this excerpt.
void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
175 void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
// Write the scratch vector back into \p block's stored vector; returns true
// when the stored values changed (this drives the worklist to a fixed point).
// NOTE(review): the assignment 'dst = scratch' and the DEBUG_LOGGING guard
// around the dump appear to be elided in this excerpt.
bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  printVector(block, scratch, 0);
194 void CFGBlockValues::resetScratch() {
/// Return an assignable reference to \p vd's entry in the scratch vector.
/// Asserts that \p vd is tracked.
ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
204 //------------------------------------------------------------------------====//
205 // Worklist: worklist for dataflow analysis.
206 //====------------------------------------------------------------------------//
// Worklist of CFG blocks, deduplicated through a bitvector of enqueued block
// IDs. enqueueSuccessors() rotates new entries so popping off the back
// behaves like a FIFO queue (see comment there).
class DataflowWorklist {
  SmallVector<const CFGBlock *, 20> worklist;
  // One bit per block ID: is the block currently in 'worklist'?
  llvm::BitVector enqueuedBlocks;
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}
  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
// Enqueue every successor of \p block that is non-null and not already
// enqueued. NOTE(review): short statements (the loop's 'continue', an early
// 'return', and the third argument of std::rotate) are elided in this
// excerpt.
void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  unsigned OldWorklistSize = worklist.size();
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  if (OldWorklistSize == 0 || OldWorklistSize == worklist.size())
  // Rotate the newly added blocks to the start of the worklist so that it forms
  // a proper queue when we pop off the end of the worklist.
  std::rotate(worklist.begin(), worklist.begin() + OldWorklistSize,
// Pop the next block from the back of the worklist (queue order is arranged
// by enqueueSuccessors' rotate) and clear its enqueued bit so it can be
// re-enqueued if its values change again. NOTE(review): the empty-case
// return, the pop_back, and the final return are elided in this excerpt.
const CFGBlock *DataflowWorklist::dequeue() {
  if (worklist.empty())
  const CFGBlock *b = worklist.back();
  enqueuedBlocks[b->getBlockID()] = false;
248 //------------------------------------------------------------------------====//
249 // Classification of DeclRefExprs as use or initialization.
250 //====------------------------------------------------------------------------//
// Result of findVar(): the tracked variable and the DeclRefExpr referring to
// it; both are null when the expression is not a tracked-variable reference
// (see 'return FindVarResult(0, 0)' in findVar). The 'vd' member and closing
// brace are elided in this excerpt.
class FindVarResult {
  const DeclRefExpr *dr;
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}
  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
// Strip parentheses and no-op casts from \p Ex, also looking through
// CK_LValueBitCast casts so that references to a variable are found beneath
// them. NOTE(review): the enclosing loop/return structure is elided in this
// excerpt.
static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  Ex = Ex->IgnoreParenNoopCasts(C);
  if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
    if (CE->getCastKind() == CK_LValueBitCast) {
      Ex = CE->getSubExpr();
/// If E is an expression comprising a reference to a single variable, find
/// that variable. Only variables accepted by isTrackedVar() are returned;
/// everything else yields a null FindVarResult.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const DeclRefExpr *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(0, 0);
/// \brief Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
// NOTE(review): the 'Class' enumeration (values Init, Use, SelfInit, Ignore
// are referenced below) and access specifiers are elided in this excerpt.
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr*, Class> Classification;
  // Convenience wrapper binding the file-level isTrackedVar to this context.
  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  // Record classification C for E when it denotes a tracked variable.
  void classify(const Expr *E, Class C);
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}
  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);
  // Allow use as a statement callback (e.g. from CFG::VisitBlockStmts).
  void operator()(Stmt *S) { Visit(S); }
  // Look up the recorded classification for DRE; untracked variables fall
  // through (remainder of the body is elided in this excerpt).
  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
      = Classification.find(DRE);
    if (I != Classification.end())
    const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
/// If \p VD's initializer is (after stripping casts) a reference to VD
/// itself -- e.g. 'int x = x;' -- return that DeclRefExpr. NOTE(review): the
/// successful return and the null fall-through are elided in this excerpt.
static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (Expr *Init = VD->getInit()) {
    const DeclRefExpr *DRE
      = dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
// Record classification C for E when it denotes a tracked variable. When the
// same DeclRefExpr is classified more than once, the larger enumerator wins
// (std::max), so a stronger classification is never downgraded.
void ClassifyRefs::classify(const Expr *E, Class C) {
  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
// For each tracked variable declared with a self-referential initializer
// ('int x = x;'), classify that reference as SelfInit. NOTE(review): the
// loop-header continuation line is elided in this excerpt.
void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment does not affect
  // whether a variable is uninitialized, and there's no point counting it as a
  // use: a compound-assignment's LHS is read first, so it is a Use.
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign)
    classify(BO->getLHS(), Ignore);
void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // cast in the AST: the operand's old value is read before being written.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
// NOTE(review): the argument-loop header continuation is elided in this
// excerpt.
void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // If a value is passed by const reference to a function, we should not assume
  // that it is initialized by the call, and we conservatively do not assume
  // that it is used either -- classify such arguments Ignore.
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
    if ((*I)->getType().isConstQualified() && (*I)->isGLValue())
      classify(*I, Ignore);
void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  // An lvalue-to-rvalue conversion is a genuine load of the variable: a Use.
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (CStyleCastExpr *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      classify(CSE->getSubExpr(), Ignore);
404 //------------------------------------------------------------------------====//
405 // Transfer function for uninitialized values analysis.
406 //====------------------------------------------------------------------------//
409 class TransferFunctions : public StmtVisitor<TransferFunctions> {
410 CFGBlockValues &vals;
412 const CFGBlock *block;
413 AnalysisDeclContext ∾
414 const ClassifyRefs &classification;
415 UninitVariablesHandler *handler;
418 TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
419 const CFGBlock *block, AnalysisDeclContext &ac,
420 const ClassifyRefs &classification,
421 UninitVariablesHandler *handler)
422 : vals(vals), cfg(cfg), block(block), ac(ac),
423 classification(classification), handler(handler) {}
425 void reportUse(const Expr *ex, const VarDecl *vd);
427 void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
428 void VisitBlockExpr(BlockExpr *be);
429 void VisitCallExpr(CallExpr *ce);
430 void VisitDeclStmt(DeclStmt *ds);
431 void VisitDeclRefExpr(DeclRefExpr *dr);
432 void VisitBinaryOperator(BinaryOperator *bo);
434 bool isTrackedVar(const VarDecl *vd) {
435 return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
438 FindVarResult findVar(const Expr *ex) {
439 return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  // Build the UninitUse record for a use of 'vd' at 'ex', where 'v' is the
  // variable's dataflow value at the use. For maybe-uninitialized uses, walk
  // the CFG backwards from the use to find the branch edges on which the
  // variable is definitely uninitialized, attaching each as an "uninit
  // branch". NOTE(review): several statements (early return, loop-header
  // continuations, else-arm braces) are elided throughout this excerpt.
  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));
    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      // NOTE(review): the early 'return Use;' appears to be elided here.
    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // Worked example (most source lines of the sample are elided in this
    // excerpt):
    // void f(bool a, bool b) {
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //
    // Starting from the maybe-uninitialized use in block 9:
    // * Block 7 is not visited because we have only visited one of its two
    //   successors.
    // * Block 8 is visited because we've visited its only successor.
    // * Block 7 is visited because we've now visited both of its successors.
    // * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //   of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    // * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    llvm::SmallVector<const CFGBlock*, 32> Queue;
    llvm::SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.back();
      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
        const CFGBlock *Pred = *I;
        if (vals.getValue(Pred, B, vd) == Initialized)
          // This block initializes the variable.
        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        // When visiting the first successor of a block, mark all NULL
        // successors as having been visited.
        for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
             SE = Pred->succ_end();
        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable: propagate backwards.
          Queue.push_back(Pred);
    // Scan the frontier, looking for blocks where the variable was
    // definitely uninitialized on an outgoing edge.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
              // NOTE(review): the else-arm for non-switch terminators appears
              // to begin here; its brace lines are elided in this excerpt.
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
// Report a use of \p vd at \p ex to the handler when the variable's current
// scratch value is Uninitialized or MayUninitialized. NOTE(review): the lines
// computing 'v' (and presumably a null check of 'handler') are elided in this
// excerpt.
void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  if (isUninitialized(v))
    handler->handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value: the loop
  // assigns into it on every iteration.
  if (DeclStmt *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
// For each variable captured by a block literal, mark it Initialized.
// NOTE(review): intervening lines between the capture check and the
// assignment (presumably handling by-reference captures / reporting) are
// elided in this excerpt -- confirm against the full file.
void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end() ; i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
    vals[vd] = Initialized;
void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  // After a call to a function like setjmp or vfork, any variable which is
  // initialized anywhere within this function may now be initialized. For now,
  // just assume such a call initializes all variables.
  // FIXME: Only mark variables as initialized if they have an initializer which
  // is reachable from here.
  Decl *Callee = ce->getCalleeDecl();
  if (Callee && Callee->hasAttr<ReturnsTwiceAttr>())
    vals.setAllScratchValues(Initialized);
// Apply the precomputed classification for this reference: a Use is checked
// and possibly reported; an Init sets the value to Initialized; a SelfInit
// ('int x = x;') is reported separately. NOTE(review): the 'break'/'return'
// statements between cases (and presumably a null check of 'handler') are
// elided in this excerpt.
void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
  case ClassifyRefs::SelfInit:
    handler->handleSelfInit(cast<VarDecl>(dr->getDecl()));
// A simple assignment initializes its LHS variable. (The LHS DeclRefExpr was
// classified Ignore by ClassifyRefs::VisitBinaryOperator, so its evaluation
// is not itself reported as a use.)
void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
// Handle declarations: a self-init ('int x = x;') deliberately marks the
// variable Uninitialized; any other initializer marks it Initialized; no
// initializer resets it to Uninitialized. NOTE(review): the loop-header
// continuation and several comment/brace lines are elided in this excerpt.
void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //   int x = x;
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
        // No initializer: the variable is now uninitialized. This matters
        // when a declaration's scope can be re-entered, e.g. inside a loop.
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
680 //------------------------------------------------------------------------====//
681 // High-level "driver" logic for uninitialized values analysis.
682 //====------------------------------------------------------------------------//
/// Run the transfer functions over a single block: merge the values of
/// already-analyzed predecessors into the scratch vector, visit each
/// statement, then write scratch back into the block's vector. Returns true
/// when the block's values changed. \p handler may be null (analysis-only
/// pass). NOTE(review): the scratch reset and the 'isFirst' bookkeeping lines
/// are elided in this excerpt.
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0) {
  wasAnalyzed[block->getBlockID()] = true;
  // Merge in values of predecessor blocks.
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
  return vals.updateValueVectorWithScratch(block);
/// Entry point: run the fixed-point uninitialized-values dataflow analysis
/// over the CFG and report findings through \p handler; block-visit counts
/// are recorded in \p stats.
void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    // NOTE(review): the 'const CFG &cfg' parameter line is elided in this
    // excerpt ('cfg' is used throughout the body).
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  // Nothing to analyze. NOTE(review): the 'return;' is elided here.
  if (vals.hasNoDeclarations())
  stats.NumVariablesAnalyzed = vals.getNumEntries();
  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);
  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n ; ++j) {
    vec[j] = Uninitialized;
  // Proceed with the worklist: iterate to a fixed point, re-enqueueing
  // successors of any block whose values changed.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  // Run through the blocks one more time, and report uninitialized variables
  // (this pass passes the handler, unlike the fixed-point loop above).
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (wasAnalyzed[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, &handler);
      ++stats.NumBlockVisits;
764 UninitVariablesHandler::~UninitVariablesHandler() {}