1 //== ArrayBoundCheckerV2.cpp ------------------------------------*- C++ -*--==//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file defines ArrayBoundCheckerV2, which is a path-sensitive check
11 // which looks for an out-of-bound array element access.
13 //===----------------------------------------------------------------------===//
15 #include "ClangSACheckers.h"
16 #include "clang/AST/CharUnits.h"
17 #include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
18 #include "clang/StaticAnalyzer/Core/Checker.h"
19 #include "clang/StaticAnalyzer/Core/CheckerManager.h"
20 #include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
21 #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
22 #include "llvm/ADT/SmallString.h"
23 #include "llvm/Support/raw_ostream.h"
25 using namespace clang;
// Path-sensitive checker that reports loads/stores whose byte offset is
// provably outside the extent of the accessed base region.
// NOTE(review): this declaration appears truncated in this excerpt (access
// specifiers, the tail of reportOOB's parameter list, and the closing brace
// are not visible) — confirm against the full file.
class ArrayBoundCheckerV2 :
    public Checker<check::Location> {
  // Lazily-allocated bug type, reused for every report ("Out-of-bound access").
  mutable OwningPtr<BuiltinBug> BT;

  // Which bound was violated; selects the diagnostic message in reportOOB:
  // before the block, past its end, or tainted index while under-constrained.
  enum OOB_Kind { OOB_Precedes, OOB_Excedes, OOB_Tainted };

  // Emit a sink node and a bug report for the given error state.
  // NOTE(review): the OOB_Kind parameter line appears elided here.
  void reportOOB(CheckerContext &C, ProgramStateRef errorState,
  // Callback invoked by the engine on every load/store of location 'l'.
  void checkLocation(SVal l, bool isLoad, const Stmt*S,
                     CheckerContext &C) const;
// FIXME: Eventually replace RegionRawOffset with this class.
// Value type pairing a base SubRegion with a symbolic byte offset into it.
// A null base region marks an invalid/unknown offset.
// NOTE(review): parts of this declaration (the byteOffset member, the first
// line of the default constructor, access specifiers, and the closing brace)
// appear elided in this excerpt — confirm against the full file.
class RegionRawOffsetV2 {
  // Base region the byte offset is relative to; null means "no offset".
  const SubRegion *baseRegion;
    : baseRegion(0), byteOffset(UnknownVal()) {}
  // Construct a valid offset of 'offset' bytes into 'base'.
  RegionRawOffsetV2(const SubRegion* base, SVal offset)
    : baseRegion(base), byteOffset(offset) {}
  // The offset; callers must only invoke this when the offset is a NonLoc
  // (castAs asserts otherwise).
  NonLoc getByteOffset() const { return byteOffset.castAs<NonLoc>(); }
  const SubRegion *getRegion() const { return baseRegion; }
  // Strip ElementRegions off 'location', folding each index into a raw byte
  // offset from the remaining base region.
  static RegionRawOffsetV2 computeOffset(ProgramStateRef state,
                                         SValBuilder &svalBuilder,
  // Debugging helper: print "raw_offset_v2{region,offset}" to 'os'.
  void dumpToStream(raw_ostream &os) const;
68 static SVal computeExtentBegin(SValBuilder &svalBuilder,
69 const MemRegion *region) {
71 switch (region->getKind()) {
73 return svalBuilder.makeZeroArrayIndex();
74 case MemRegion::SymbolicRegionKind:
75 // FIXME: improve this later by tracking symbolic lower bounds
76 // for symbolic regions.
78 case MemRegion::ElementRegionKind:
79 region = cast<SubRegion>(region)->getSuperRegion();
// Per-access bounds check: compute the raw byte offset of 'location' within
// its base region, then test it against the region's lower bound (extent
// begin) and upper bound (extent).  Provable violations are reported via
// reportOOB; otherwise the assumed in-bounds constraints are propagated.
// NOTE(review): several statements (early returns / closing braces / the
// 'const Stmt*' parameter line) appear elided from this excerpt; the
// comments below describe only the visible logic.
void ArrayBoundCheckerV2::checkLocation(SVal location, bool isLoad,
                                        CheckerContext &checkerContext) const {
  // NOTE: Instead of using ProgramState::assumeInBound(), we are prototyping
  // some new logic here that reasons directly about memory region extents.
  // Once that logic is more mature, we can bring it back to assumeInBound()
  // for all clients to use.

  // The algorithm we are using here for bounds checking is to see if the
  // memory access is within the extent of the base region.  Since we
  // have some flexibility in defining the base region, we can achieve
  // various levels of conservatism in our buffer overflow checking.
  ProgramStateRef state = checkerContext.getState();
  // Remember the entry state so a transition is added below only when we
  // actually learned new constraints.
  ProgramStateRef originalState = state;

  SValBuilder &svalBuilder = checkerContext.getSValBuilder();
  const RegionRawOffsetV2 &rawOffset =
    RegionRawOffsetV2::computeOffset(state, svalBuilder, location);

  // No identifiable base region / offset: nothing we can check.
  if (!rawOffset.getRegion())

  // CHECK LOWER BOUND: Is byteOffset < extent begin?
  // If so, we are doing a load/store
  // before the first valid offset in the memory region.

  SVal extentBegin = computeExtentBegin(svalBuilder, rawOffset.getRegion());

  if (Optional<NonLoc> NV = extentBegin.getAs<NonLoc>()) {
    // Symbolically evaluate "offset < lower bound".
    svalBuilder.evalBinOpNN(state, BO_LT, rawOffset.getByteOffset(), *NV,
                            svalBuilder.getConditionType());

    // If the comparison did not produce a usable NonLoc, give up on this
    // bound.
    Optional<NonLoc> lowerBoundToCheck = lowerBound.getAs<NonLoc>();
    if (!lowerBoundToCheck)

    // Fork the state on the comparison: one world where the access precedes
    // the lower bound, one where it does not.
    ProgramStateRef state_precedesLowerBound, state_withinLowerBound;
    llvm::tie(state_precedesLowerBound, state_withinLowerBound) =
      state->assume(*lowerBoundToCheck);

    // Are we constrained enough to definitely precede the lower bound?
    if (state_precedesLowerBound && !state_withinLowerBound) {
      reportOOB(checkerContext, state_precedesLowerBound, OOB_Precedes);

    // Otherwise, assume the constraint of the lower bound.
    assert(state_withinLowerBound);
    state = state_withinLowerBound;

  // CHECK UPPER BOUND: Is byteOffset >= extent(baseRegion)?  If so,
  // we are doing a load/store after the last valid offset.
  DefinedOrUnknownSVal extentVal =
    rawOffset.getRegion()->getExtent(svalBuilder);
  // An extent that is not a NonLoc cannot be compared symbolically.
  if (!extentVal.getAs<NonLoc>())

  // Symbolically evaluate "offset >= extent".
    = svalBuilder.evalBinOpNN(state, BO_GE, rawOffset.getByteOffset(),
                              extentVal.castAs<NonLoc>(),
                              svalBuilder.getConditionType());

  Optional<NonLoc> upperboundToCheck = upperbound.getAs<NonLoc>();
  if (!upperboundToCheck)

  // Fork the state on the upper-bound comparison.
  ProgramStateRef state_exceedsUpperBound, state_withinUpperBound;
  llvm::tie(state_exceedsUpperBound, state_withinUpperBound) =
    state->assume(*upperboundToCheck);

  // If we are under constrained and the index variables are tainted, report.
  if (state_exceedsUpperBound && state_withinUpperBound) {
    if (state->isTainted(rawOffset.getByteOffset()))
      reportOOB(checkerContext, state_exceedsUpperBound, OOB_Tainted);

  // If we are constrained enough to definitely exceed the upper bound, report.
  if (state_exceedsUpperBound) {
    assert(!state_withinUpperBound);
    reportOOB(checkerContext, state_exceedsUpperBound, OOB_Excedes);

  assert(state_withinUpperBound);
  state = state_withinUpperBound;

  // Record the narrowed (in-bounds) state if anything changed.
  if (state != originalState)
    checkerContext.addTransition(state);
181 void ArrayBoundCheckerV2::reportOOB(CheckerContext &checkerContext,
182 ProgramStateRef errorState,
183 OOB_Kind kind) const {
185 ExplodedNode *errorNode = checkerContext.generateSink(errorState);
190 BT.reset(new BuiltinBug("Out-of-bound access"));
192 // FIXME: This diagnostics are preliminary. We should get far better
193 // diagnostics for explaining buffer overruns.
195 SmallString<256> buf;
196 llvm::raw_svector_ostream os(buf);
197 os << "Out of bound memory access ";
200 os << "(accessed memory precedes memory block)";
203 os << "(access exceeds upper limit of memory block)";
206 os << "(index is tainted)";
210 checkerContext.emitReport(new BugReport(*BT, os.str(), errorNode));
213 void RegionRawOffsetV2::dump() const {
214 dumpToStream(llvm::errs());
217 void RegionRawOffsetV2::dumpToStream(raw_ostream &os) const {
218 os << "raw_offset_v2{" << getRegion() << ',' << getByteOffset() << '}';
221 // FIXME: Merge with the implementation of the same method in Store.cpp
222 static bool IsCompleteType(ASTContext &Ctx, QualType Ty) {
223 if (const RecordType *RT = Ty->getAs<RecordType>()) {
224 const RecordDecl *D = RT->getDecl();
225 if (!D->getDefinition())
233 // Lazily computes a value to be used by 'computeOffset'. If 'val'
234 // is unknown or undefined, we lazily substitute '0'. Otherwise,
236 static inline SVal getValue(SVal val, SValBuilder &svalBuilder) {
237 return val.getAs<UndefinedVal>() ? svalBuilder.makeArrayIndex(0) : val;
240 // Scale a base value by a scaling factor, and return the scaled
241 // value as an SVal. Used by 'computeOffset'.
242 static inline SVal scaleValue(ProgramStateRef state,
243 NonLoc baseVal, CharUnits scaling,
245 return sb.evalBinOpNN(state, BO_Mul, baseVal,
246 sb.makeArrayIndex(scaling.getQuantity()),
247 sb.getArrayIndexType());
250 // Add an SVal to another, treating unknown and undefined values as
251 // summing to UnknownVal. Used by 'computeOffset'.
252 static SVal addValue(ProgramStateRef state, SVal x, SVal y,
253 SValBuilder &svalBuilder) {
254 // We treat UnknownVals and UndefinedVals the same here because we
255 // only care about computing offsets.
256 if (x.isUnknownOrUndef() || y.isUnknownOrUndef())
259 return svalBuilder.evalBinOpNN(state, BO_Add, x.castAs<NonLoc>(),
261 svalBuilder.getArrayIndexType());
264 /// Compute a raw byte offset from a base region. Used for array bounds
266 RegionRawOffsetV2 RegionRawOffsetV2::computeOffset(ProgramStateRef state,
267 SValBuilder &svalBuilder,
270 const MemRegion *region = location.getAsRegion();
271 SVal offset = UndefinedVal();
274 switch (region->getKind()) {
276 if (const SubRegion *subReg = dyn_cast<SubRegion>(region)) {
277 offset = getValue(offset, svalBuilder);
278 if (!offset.isUnknownOrUndef())
279 return RegionRawOffsetV2(subReg, offset);
281 return RegionRawOffsetV2();
283 case MemRegion::ElementRegionKind: {
284 const ElementRegion *elemReg = cast<ElementRegion>(region);
285 SVal index = elemReg->getIndex();
286 if (!index.getAs<NonLoc>())
287 return RegionRawOffsetV2();
288 QualType elemType = elemReg->getElementType();
289 // If the element is an incomplete type, go no further.
290 ASTContext &astContext = svalBuilder.getContext();
291 if (!IsCompleteType(astContext, elemType))
292 return RegionRawOffsetV2();
294 // Update the offset.
295 offset = addValue(state,
296 getValue(offset, svalBuilder),
298 index.castAs<NonLoc>(),
299 astContext.getTypeSizeInChars(elemType),
303 if (offset.isUnknownOrUndef())
304 return RegionRawOffsetV2();
306 region = elemReg->getSuperRegion();
311 return RegionRawOffsetV2();
315 void ento::registerArrayBoundCheckerV2(CheckerManager &mgr) {
316 mgr.registerChecker<ArrayBoundCheckerV2>();