#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclBase.h"
#include "clang/AST/Expr.h"
#include "clang/AST/OperationKinds.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/AST/Type.h"
#include "clang/Analysis/Analyses/PostOrderCFGView.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
#include "clang/Basic/LLVM.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <cassert>
using namespace clang;
#define DEBUG_LOGGING 0
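// Returns true if 'vd' is a local variable we want to track: a non-static,
// non-exception, non-capture, explicitly written variable of scalar, vector,
// or record type declared directly in 'dc'.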
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
!vd->isExceptionVariable() && !vd->isInitCapture() &&
!vd->isImplicit() && vd->getDeclContext() == dc) {
QualType ty = vd->getType();
return ty->isScalarType() || ty->isVectorType() || ty->isRecordType();
}
return false;
}
namespace {
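// DeclToIndex - Maps tracked VarDecls to dense, packed indices into the
// per-block value vectors.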
class DeclToIndex {
llvm::DenseMap<const VarDecl *, unsigned> map;
public:
DeclToIndex() = default;
void computeMap(const DeclContext &dc);
unsigned size() const { return map.size(); }
Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}
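// Assign a dense index to every tracked variable declared in 'dc'.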
void DeclToIndex::computeMap(const DeclContext &dc) {
unsigned count = 0;
DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
E(dc.decls_end());
for ( ; I != E; ++I) {
const VarDecl *vd = *I;
if (isTrackedVar(vd, &dc))
map[vd] = count++;
}
}
Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
if (I == map.end())
return None;
return I->second;
}
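// The per-variable lattice, packed into two bits. The merge operation in
// mergeIntoScratch() is a bitwise OR, so Initialized | Uninitialized yields
// MayUninitialized.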
enum Value { Unknown = 0x0,
Initialized = 0x1,
Uninitialized = 0x2,
MayUninitialized = 0x3 };
static bool isUninitialized(const Value v) {
return v >= Uninitialized;
}
static bool isAlwaysUninit(const Value v) {
return v == Uninitialized;
}
namespace {
using ValueVector = llvm::PackedVector<Value, 2, llvm::SmallBitVector>;
class CFGBlockValues {
const CFG &cfg;
SmallVector<ValueVector, 8> vals;
ValueVector scratch;
DeclToIndex declToIndex;
public:
CFGBlockValues(const CFG &cfg);
unsigned getNumEntries() const { return declToIndex.size(); }
void computeSetOfDeclarations(const DeclContext &dc);
ValueVector &getValueVector(const CFGBlock *block) {
return vals[block->getBlockID()];
}
void setAllScratchValues(Value V);
void mergeIntoScratch(ValueVector const &source, bool isFirst);
bool updateValueVectorWithScratch(const CFGBlock *block);
bool hasNoDeclarations() const {
return declToIndex.size() == 0;
}
void resetScratch();
ValueVector::reference operator[](const VarDecl *vd);
Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
const VarDecl *vd) {
const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
assert(idx);
return getValueVector(block)[idx.value()];
}
};
}
CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}
void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
declToIndex.computeMap(dc);
unsigned decls = declToIndex.size();
scratch.resize(decls);
unsigned n = cfg.getNumBlockIDs();
if (!n)
return;
vals.resize(n);
for (auto &val : vals)
val.resize(decls);
}
#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
unsigned num) {
llvm::errs() << block->getBlockID() << " :";
for (const auto &i : bv)
llvm::errs() << ' ' << i;
llvm::errs() << " : " << num << '\n';
}
#endif
void CFGBlockValues::setAllScratchValues(Value V) {
for (unsigned I = 0, E = scratch.size(); I != E; ++I)
scratch[I] = V;
}
void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
bool isFirst) {
if (isFirst)
scratch = source;
else
scratch |= source;
}
bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
ValueVector &dst = getValueVector(block);
bool changed = (dst != scratch);
if (changed)
dst = scratch;
#if DEBUG_LOGGING
printVector(block, scratch, 0);
#endif
return changed;
}
void CFGBlockValues::resetScratch() {
scratch.reset();
}
ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
assert(idx);
return scratch[idx.value()];
}
namespace {
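// FindVarResult - The tracked variable (and the DeclRefExpr naming it) that
// an expression ultimately refers to, or null/null if there is none.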
class FindVarResult {
const VarDecl *vd;
const DeclRefExpr *dr;
public:
FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}
const DeclRefExpr *getDeclRefExpr() const { return dr; }
const VarDecl *getDecl() const { return vd; }
};
}
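// Peel off parentheses, no-op casts, and lvalue bitcasts to reach the
// underlying expression.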
static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
while (Ex) {
Ex = Ex->IgnoreParenNoopCasts(C);
if (const auto *CE = dyn_cast<CastExpr>(Ex)) {
if (CE->getCastKind() == CK_LValueBitCast) {
Ex = CE->getSubExpr();
continue;
}
}
break;
}
return Ex;
}
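// If 'E' (after stripping casts) is a DeclRefExpr to a tracked variable in
// 'DC', return that variable and the reference; otherwise return an empty
// result.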
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
if (const auto *DRE =
dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
if (const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()))
if (isTrackedVar(VD, DC))
return FindVarResult(VD, DRE);
return FindVarResult(nullptr, nullptr);
}
namespace {
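// ClassifyRefs - A pre-pass that classifies every DeclRefExpr to a tracked
// variable as an initialization, a use, a self-initialization ('int x = x'),
// a use through a const reference, or an ignored reference. References left
// unclassified default to Init in get(), which conservatively covers cases
// such as taking the variable's address.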
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
enum Class {
Init,
Use,
SelfInit,
ConstRefUse,
Ignore
};
private:
const DeclContext *DC;
llvm::DenseMap<const DeclRefExpr *, Class> Classification;
bool isTrackedVar(const VarDecl *VD) const {
return ::isTrackedVar(VD, DC);
}
void classify(const Expr *E, Class C);
public:
ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}
void VisitDeclStmt(DeclStmt *DS);
void VisitUnaryOperator(UnaryOperator *UO);
void VisitBinaryOperator(BinaryOperator *BO);
void VisitCallExpr(CallExpr *CE);
void VisitCastExpr(CastExpr *CE);
void VisitOMPExecutableDirective(OMPExecutableDirective *ED);
void operator()(Stmt *S) { Visit(S); }
Class get(const DeclRefExpr *DRE) const {
llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
= Classification.find(DRE);
if (I != Classification.end())
return I->second;
const auto *VD = dyn_cast<VarDecl>(DRE->getDecl());
if (!VD || !isTrackedVar(VD))
return Ignore;
return Init;
}
};
}
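// If 'VD' is initialized (modulo casts) with a reference to itself, as in
// 'int x = x', return that DeclRefExpr. Record types are excluded.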
static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
if (VD->getType()->isRecordType())
return nullptr;
if (Expr *Init = VD->getInit()) {
const auto *DRE =
dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
if (DRE && DRE->getDecl() == VD)
return DRE;
}
return nullptr;
}
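// Push the classification 'C' down through conditionals, opaque values,
// member accesses, and pointer-to-member / comma operators until a tracked
// DeclRefExpr is found, recording the maximum classification seen for it.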
void ClassifyRefs::classify(const Expr *E, Class C) {
E = E->IgnoreParens();
if (const auto *CO = dyn_cast<ConditionalOperator>(E)) {
classify(CO->getTrueExpr(), C);
classify(CO->getFalseExpr(), C);
return;
}
if (const auto *BCO = dyn_cast<BinaryConditionalOperator>(E)) {
classify(BCO->getFalseExpr(), C);
return;
}
if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E)) {
classify(OVE->getSourceExpr(), C);
return;
}
if (const auto *ME = dyn_cast<MemberExpr>(E)) {
if (const auto *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) {
if (!VD->isStaticDataMember())
classify(ME->getBase(), C);
}
return;
}
if (const auto *BO = dyn_cast<BinaryOperator>(E)) {
switch (BO->getOpcode()) {
case BO_PtrMemD:
case BO_PtrMemI:
classify(BO->getLHS(), C);
return;
case BO_Comma:
classify(BO->getRHS(), C);
return;
default:
return;
}
}
FindVarResult Var = findVar(E, DC);
if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
Classification[DRE] = std::max(Classification[DRE], C);
}
void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
for (auto *DI : DS->decls()) {
auto *VD = dyn_cast<VarDecl>(DI);
if (VD && isTrackedVar(VD))
if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
Classification[DRE] = SelfInit;
}
}
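// The LHS of a plain assignment or comma is not itself a use (the assignment
// is treated as an initialization by TransferFunctions); a compound
// assignment reads its LHS first, so classify that as a use.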
void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
if (BO->isCompoundAssignmentOp())
classify(BO->getLHS(), Use);
else if (BO->getOpcode() == BO_Assign || BO->getOpcode() == BO_Comma)
classify(BO->getLHS(), Ignore);
}
void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
if (UO->isIncrementDecrementOp())
classify(UO->getSubExpr(), Use);
}
void ClassifyRefs::VisitOMPExecutableDirective(OMPExecutableDirective *ED) {
for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses()))
classify(cast<Expr>(S), Use);
}
static bool isPointerToConst(const QualType &QT) {
return QT->isAnyPointerType() && QT->getPointeeType().isConstQualified();
}
static bool hasTrivialBody(CallExpr *CE) {
if (FunctionDecl *FD = CE->getDirectCallee()) {
if (FunctionTemplateDecl *FTD = FD->getPrimaryTemplate())
return FTD->getTemplatedDecl()->hasTrivialBody();
return FD->hasTrivialBody();
}
return false;
}
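// Arguments passed by const reference to a non-trivial function are uses of
// a (presumably initialized) value; arguments passed via pointer-to-const
// are conservatively neither uses nor initializations. An argument to
// std::move of non-record type is a use.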
void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
if (CE->isCallToStdMove()) {
if (!CE->getArg(0)->getType()->isRecordType())
classify(CE->getArg(0), Use);
return;
}
bool isTrivialBody = hasTrivialBody(CE);
for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
I != E; ++I) {
if ((*I)->isGLValue()) {
if ((*I)->getType().isConstQualified())
classify((*I), isTrivialBody ? Ignore : ConstRefUse);
} else if (isPointerToConst((*I)->getType())) {
const Expr *Ex = stripCasts(DC->getParentASTContext(), *I);
const auto *UO = dyn_cast<UnaryOperator>(Ex);
if (UO && UO->getOpcode() == UO_AddrOf)
Ex = UO->getSubExpr();
classify(Ex, Ignore);
}
}
}
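// A load (lvalue-to-rvalue cast) is a use; a C-style cast to void, e.g.
// '(void) x;', deliberately squelches the use.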
void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
if (CE->getCastKind() == CK_LValueToRValue)
classify(CE->getSubExpr(), Use);
else if (const auto *CSE = dyn_cast<CStyleCastExpr>(CE)) {
if (CSE->getType()->isVoidType()) {
classify(CSE->getSubExpr(), Ignore);
}
}
}
namespace {
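// TransferFunctions - The dataflow transfer functions. Visiting a block's
// statements updates the scratch ValueVector and reports uses of
// uninitialized variables to the handler.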
class TransferFunctions : public StmtVisitor<TransferFunctions> {
CFGBlockValues &vals;
const CFG &cfg;
const CFGBlock *block;
AnalysisDeclContext &ac;
const ClassifyRefs &classification;
ObjCNoReturn objCNoRet;
UninitVariablesHandler &handler;
public:
TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
const CFGBlock *block, AnalysisDeclContext &ac,
const ClassifyRefs &classification,
UninitVariablesHandler &handler)
: vals(vals), cfg(cfg), block(block), ac(ac),
classification(classification), objCNoRet(ac.getASTContext()),
handler(handler) {}
void reportUse(const Expr *ex, const VarDecl *vd);
void reportConstRefUse(const Expr *ex, const VarDecl *vd);
void VisitBinaryOperator(BinaryOperator *bo);
void VisitBlockExpr(BlockExpr *be);
void VisitCallExpr(CallExpr *ce);
void VisitDeclRefExpr(DeclRefExpr *dr);
void VisitDeclStmt(DeclStmt *ds);
void VisitGCCAsmStmt(GCCAsmStmt *as);
void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
void VisitObjCMessageExpr(ObjCMessageExpr *ME);
void VisitOMPExecutableDirective(OMPExecutableDirective *ED);
bool isTrackedVar(const VarDecl *vd) {
return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
}
FindVarResult findVar(const Expr *ex) {
return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
}
UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
UninitUse Use(ex, isAlwaysUninit(v));
assert(isUninitialized(v));
if (Use.getKind() == UninitUse::Always)
return Use;
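// For a 'maybe uninitialized' use, walk backwards over edges on which the
// variable can still be uninitialized. A block is visited once all of its
// successors have been visited, so the search traces the subgraph that
// inevitably leads to the use; the frontier of that search identifies the
// branches on which the variable is used uninitialized.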
SmallVector<const CFGBlock*, 32> Queue;
SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
Queue.push_back(block);
SuccsVisited[block->getBlockID()] = block->succ_size();
while (!Queue.empty()) {
const CFGBlock *B = Queue.pop_back_val();
if (B == &cfg.getEntry())
Use.setUninitAfterCall();
for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
I != E; ++I) {
const CFGBlock *Pred = *I;
if (!Pred)
continue;
Value AtPredExit = vals.getValue(Pred, B, vd);
if (AtPredExit == Initialized)
continue;
if (AtPredExit == MayUninitialized &&
vals.getValue(B, nullptr, vd) == Uninitialized) {
Use.setUninitAfterDecl();
continue;
}
if (AtPredExit == MayUninitialized) {
CFGTerminator term = Pred->getTerminator();
if (const auto *as = dyn_cast_or_null<GCCAsmStmt>(term.getStmt())) {
const CFGBlock *fallthrough = *Pred->succ_begin();
if (as->isAsmGoto() &&
llvm::any_of(as->outputs(), [&](const Expr *output) {
return vd == findVar(output).getDecl() &&
llvm::any_of(as->labels(),
[&](const AddrLabelExpr *label) {
return label->getLabel()->getStmt() == B->Label &&
B != fallthrough;
});
})) {
Use.setUninitAfterDecl();
continue;
}
}
}
unsigned &SV = SuccsVisited[Pred->getBlockID()];
if (!SV) {
for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
SE = Pred->succ_end();
SI != SE; ++SI)
if (!*SI)
++SV;
}
if (++SV == Pred->succ_size())
Queue.push_back(Pred);
}
}
for (const auto *Block : cfg) {
unsigned BlockID = Block->getBlockID();
const Stmt *Term = Block->getTerminatorStmt();
if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
Term) {
for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
E = Block->succ_end(); I != E; ++I) {
const CFGBlock *Succ = *I;
if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
vals.getValue(Block, Succ, vd) == Uninitialized) {
if (isa<SwitchStmt>(Term)) {
const Stmt *Label = Succ->getLabel();
if (!Label || !isa<SwitchCase>(Label))
continue;
UninitUse::Branch Branch;
Branch.Terminator = Label;
Branch.Output = 0; // Ignored for switch-case branches.
Use.addUninitBranch(Branch);
} else {
UninitUse::Branch Branch;
Branch.Terminator = Term;
Branch.Output = I - Block->succ_begin();
Use.addUninitBranch(Branch);
}
}
}
}
}
return Use;
}
};
}
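// Report a use of 'vd' if its current lattice value is at least possibly
// uninitialized.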
void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
Value v = vals[vd];
if (isUninitialized(v))
handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}
void TransferFunctions::reportConstRefUse(const Expr *ex, const VarDecl *vd) {
Value v = vals[vd];
if (isAlwaysUninit(v))
handler.handleConstRefUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}
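// The element variable of an ObjC for-in loop is assigned by the statement
// itself, so a declared element counts as initialized.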
void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
if (const auto *DS = dyn_cast<DeclStmt>(FS->getElement())) {
const auto *VD = cast<VarDecl>(DS->getSingleDecl());
if (isTrackedVar(VD))
vals[VD] = Initialized;
}
}
void TransferFunctions::VisitOMPExecutableDirective(
OMPExecutableDirective *ED) {
for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses())) {
assert(S && "Expected non-null used-in-clause child.");
Visit(S);
}
if (!ED->isStandaloneDirective())
Visit(ED->getStructuredBlock());
}
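// Capturing a tracked variable by reference lets the block initialize it, so
// mark it Initialized; capturing by copy reads its current value, so report
// a use if it may be uninitialized.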
void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
const BlockDecl *bd = be->getBlockDecl();
for (const auto &I : bd->captures()) {
const VarDecl *vd = I.getVariable();
if (!isTrackedVar(vd))
continue;
if (I.isByRef()) {
vals[vd] = Initialized;
continue;
}
reportUse(be, vd);
}
}
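// Calls to returns_twice functions (e.g. vfork) conservatively mark all
// tracked variables Initialized; calls to analyzer_noreturn functions reset
// everything to Unknown so the nominally unreachable continuation does not
// produce reports.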
void TransferFunctions::VisitCallExpr(CallExpr *ce) {
if (Decl *Callee = ce->getCalleeDecl()) {
if (Callee->hasAttr<ReturnsTwiceAttr>()) {
vals.setAllScratchValues(Initialized);
}
else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
vals.setAllScratchValues(Unknown);
}
}
}
void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
switch (classification.get(dr)) {
case ClassifyRefs::Ignore:
break;
case ClassifyRefs::Use:
reportUse(dr, cast<VarDecl>(dr->getDecl()));
break;
case ClassifyRefs::Init:
vals[cast<VarDecl>(dr->getDecl())] = Initialized;
break;
case ClassifyRefs::SelfInit:
handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
break;
case ClassifyRefs::ConstRefUse:
reportConstRefUse(dr, cast<VarDecl>(dr->getDecl()));
break;
}
}
void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
if (BO->getOpcode() == BO_Assign) {
FindVarResult Var = findVar(BO->getLHS());
if (const VarDecl *VD = Var.getDecl())
vals[VD] = Initialized;
}
}
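// A self-referential initializer ('int x = x') deliberately leaves the
// variable Uninitialized so later uses are still diagnosed; any other
// initializer marks it Initialized; a declaration without an initializer
// makes the variable Uninitialized again.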
void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
for (auto *DI : DS->decls()) {
auto *VD = dyn_cast<VarDecl>(DI);
if (VD && isTrackedVar(VD)) {
if (getSelfInitExpr(VD)) {
vals[VD] = Uninitialized;
} else if (VD->getInit()) {
vals[VD] = Initialized;
} else {
vals[VD] = Uninitialized;
}
}
}
}
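// The outputs of an 'asm goto' may or may not have been written when an
// indirect branch is taken, so mark them MayUninitialized.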
void TransferFunctions::VisitGCCAsmStmt(GCCAsmStmt *as) {
if (!as->isAsmGoto())
return;
ASTContext &C = ac.getASTContext();
for (const Expr *O : as->outputs()) {
const Expr *Ex = stripCasts(C, O);
while (const auto *UO = dyn_cast<UnaryOperator>(Ex))
Ex = stripCasts(C, UO->getSubExpr());
if (const VarDecl *VD = findVar(Ex).getDecl())
vals[VD] = MayUninitialized;
}
}
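// Objective-C messages that are known to never return are treated like
// analyzer_noreturn calls: reset all values to Unknown.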
void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
if (objCNoRet.isImplicitNoReturn(ME)) {
vals.setAllScratchValues(Unknown);
}
}
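// Process one CFG block: merge the value vectors of already-analyzed
// predecessors into scratch, run the transfer functions over the block's
// statements (and over an 'asm goto' terminator, if any), and report whether
// the block's stored values changed.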
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
AnalysisDeclContext &ac, CFGBlockValues &vals,
const ClassifyRefs &classification,
llvm::BitVector &wasAnalyzed,
UninitVariablesHandler &handler) {
wasAnalyzed[block->getBlockID()] = true;
vals.resetScratch();
bool isFirst = true;
for (CFGBlock::const_pred_iterator I = block->pred_begin(),
E = block->pred_end(); I != E; ++I) {
const CFGBlock *pred = *I;
if (!pred)
continue;
if (wasAnalyzed[pred->getBlockID()]) {
vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
isFirst = false;
}
}
TransferFunctions tf(vals, cfg, block, ac, classification, handler);
for (const auto &I : *block) {
if (Optional<CFGStmt> cs = I.getAs<CFGStmt>())
tf.Visit(const_cast<Stmt *>(cs->getStmt()));
}
CFGTerminator terminator = block->getTerminator();
if (auto *as = dyn_cast_or_null<GCCAsmStmt>(terminator.getStmt()))
if (as->isAsmGoto())
tf.Visit(as);
return vals.updateValueVectorWithScratch(block);
}
namespace {
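// PruneBlocksHandler - Used during the fixed-point iteration to record which
// blocks produced a potential uninitialized use, so the final reporting pass
// only revisits those blocks.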
struct PruneBlocksHandler : public UninitVariablesHandler {
llvm::BitVector hadUse;
bool hadAnyUse = false;
unsigned currentBlock = 0;
PruneBlocksHandler(unsigned numBlocks) : hadUse(numBlocks, false) {}
~PruneBlocksHandler() override = default;
void handleUseOfUninitVariable(const VarDecl *vd,
const UninitUse &use) override {
hadUse[currentBlock] = true;
hadAnyUse = true;
}
void handleConstRefUseOfUninitVariable(const VarDecl *vd,
const UninitUse &use) override {
hadUse[currentBlock] = true;
hadAnyUse = true;
}
void handleSelfInit(const VarDecl *vd) override {
hadUse[currentBlock] = true;
hadAnyUse = true;
}
};
}
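// Entry point. Runs the forward dataflow analysis to a fixed point with the
// pruning handler, then re-runs the transfer functions over each block that
// had a potential use, this time with the client's handler so diagnostics
// are actually emitted.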
void clang::runUninitializedVariablesAnalysis(
const DeclContext &dc,
const CFG &cfg,
AnalysisDeclContext &ac,
UninitVariablesHandler &handler,
UninitVariablesAnalysisStats &stats) {
CFGBlockValues vals(cfg);
vals.computeSetOfDeclarations(dc);
if (vals.hasNoDeclarations())
return;
stats.NumVariablesAnalyzed = vals.getNumEntries();
ClassifyRefs classification(ac);
cfg.VisitBlockStmts(classification);
const CFGBlock &entry = cfg.getEntry();
ValueVector &vec = vals.getValueVector(&entry);
const unsigned n = vals.getNumEntries();
for (unsigned j = 0; j < n; ++j) {
vec[j] = Uninitialized;
}
ForwardDataflowWorklist worklist(cfg, ac);
llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
worklist.enqueueSuccessors(&cfg.getEntry());
llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
wasAnalyzed[cfg.getEntry().getBlockID()] = true;
PruneBlocksHandler PBH(cfg.getNumBlockIDs());
while (const CFGBlock *block = worklist.dequeue()) {
PBH.currentBlock = block->getBlockID();
bool changed = runOnBlock(block, cfg, ac, vals,
classification, wasAnalyzed, PBH);
++stats.NumBlockVisits;
if (changed || !previouslyVisited[block->getBlockID()])
worklist.enqueueSuccessors(block);
previouslyVisited[block->getBlockID()] = true;
}
if (!PBH.hadAnyUse)
return;
for (const auto *block : cfg)
if (PBH.hadUse[block->getBlockID()]) {
runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
++stats.NumBlockVisits;
}
}
UninitVariablesHandler::~UninitVariablesHandler() = default;