//===--- PaddingChecker.cpp ----------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines a checker that checks for padding that could be
// removed by re-ordering members.
//
//===----------------------------------------------------------------------===//
#include "ClangSACheckers.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugReporter.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cstdint>
#include <limits>
#include <memory>
#include <tuple>
#include <utility>
29 using namespace clang;
33 class PaddingChecker : public Checker<check::ASTDecl<TranslationUnitDecl>> {
35 mutable std::unique_ptr<BugType> PaddingBug;
36 mutable int64_t AllowedPad;
37 mutable BugReporter *BR;
40 void checkASTDecl(const TranslationUnitDecl *TUD, AnalysisManager &MGR,
41 BugReporter &BRArg) const {
44 MGR.getAnalyzerOptions().getOptionAsInteger("AllowedPad", 24, this);
45 assert(AllowedPad >= 0 && "AllowedPad option should be non-negative");
47 // The calls to checkAST* from AnalysisConsumer don't
48 // visit template instantiations or lambda classes. We
49 // want to visit those, so we make our own RecursiveASTVisitor.
50 struct LocalVisitor : public RecursiveASTVisitor<LocalVisitor> {
51 const PaddingChecker *Checker;
52 bool shouldVisitTemplateInstantiations() const { return true; }
53 bool shouldVisitImplicitCode() const { return true; }
54 explicit LocalVisitor(const PaddingChecker *Checker) : Checker(Checker) {}
55 bool VisitRecordDecl(const RecordDecl *RD) {
56 Checker->visitRecord(RD);
59 bool VisitVarDecl(const VarDecl *VD) {
60 Checker->visitVariable(VD);
63 // TODO: Visit array new and mallocs for arrays.
66 LocalVisitor visitor(this);
67 visitor.TraverseDecl(const_cast<TranslationUnitDecl *>(TUD));
70 /// \brief Look for records of overly padded types. If padding *
71 /// PadMultiplier exceeds AllowedPad, then generate a report.
72 /// PadMultiplier is used to share code with the array padding
74 void visitRecord(const RecordDecl *RD, uint64_t PadMultiplier = 1) const {
75 if (shouldSkipDecl(RD))
78 auto &ASTContext = RD->getASTContext();
79 const ASTRecordLayout &RL = ASTContext.getASTRecordLayout(RD);
80 assert(llvm::isPowerOf2_64(RL.getAlignment().getQuantity()));
82 CharUnits BaselinePad = calculateBaselinePad(RD, ASTContext, RL);
83 if (BaselinePad.isZero())
87 SmallVector<const FieldDecl *, 20> OptimalFieldsOrder;
88 std::tie(OptimalPad, OptimalFieldsOrder) =
89 calculateOptimalPad(RD, ASTContext, RL);
91 CharUnits DiffPad = PadMultiplier * (BaselinePad - OptimalPad);
92 if (DiffPad.getQuantity() <= AllowedPad) {
93 assert(!DiffPad.isNegative() && "DiffPad should not be negative");
94 // There is not enough excess padding to trigger a warning.
97 reportRecord(RD, BaselinePad, OptimalPad, OptimalFieldsOrder);
100 /// \brief Look for arrays of overly padded types. If the padding of the
101 /// array type exceeds AllowedPad, then generate a report.
102 void visitVariable(const VarDecl *VD) const {
103 const ArrayType *ArrTy = VD->getType()->getAsArrayTypeUnsafe();
104 if (ArrTy == nullptr)
107 if (const ConstantArrayType *CArrTy = dyn_cast<ConstantArrayType>(ArrTy))
108 Elts = CArrTy->getSize().getZExtValue();
111 const RecordType *RT = ArrTy->getElementType()->getAs<RecordType>();
115 // TODO: Recurse into the fields and base classes to see if any
116 // of those have excess padding.
117 visitRecord(RT->getDecl(), Elts);
120 bool shouldSkipDecl(const RecordDecl *RD) const {
121 auto Location = RD->getLocation();
122 // If the construct doesn't have a source file, then it's not something
123 // we want to diagnose.
124 if (!Location.isValid())
126 SrcMgr::CharacteristicKind Kind =
127 BR->getSourceManager().getFileCharacteristic(Location);
128 // Throw out all records that come from system headers.
129 if (Kind != SrcMgr::C_User)
132 // Not going to attempt to optimize unions.
135 // How do you reorder fields if you haven't got any?
136 if (RD->field_empty())
138 if (auto *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
139 // Tail padding with base classes ends up being very complicated.
140 // We will skip objects with base classes for now.
141 if (CXXRD->getNumBases() != 0)
143 // Virtual bases are complicated, skipping those for now.
144 if (CXXRD->getNumVBases() != 0)
146 // Can't layout a template, so skip it. We do still layout the
147 // instantiations though.
148 if (CXXRD->getTypeForDecl()->isDependentType())
150 if (CXXRD->getTypeForDecl()->isInstantiationDependentType())
153 auto IsTrickyField = [](const FieldDecl *FD) -> bool {
154 // Bitfield layout is hard.
155 if (FD->isBitField())
158 // Variable length arrays are tricky too.
159 QualType Ty = FD->getType();
160 if (Ty->isIncompleteArrayType())
165 if (std::any_of(RD->field_begin(), RD->field_end(), IsTrickyField))
170 static CharUnits calculateBaselinePad(const RecordDecl *RD,
171 const ASTContext &ASTContext,
172 const ASTRecordLayout &RL) {
173 CharUnits PaddingSum;
174 CharUnits Offset = ASTContext.toCharUnitsFromBits(RL.getFieldOffset(0));
175 for (const FieldDecl *FD : RD->fields()) {
176 // This checker only cares about the padded size of the
177 // field, and not the data size. If the field is a record
178 // with tail padding, then we won't put that number in our
179 // total because reordering fields won't fix that problem.
180 CharUnits FieldSize = ASTContext.getTypeSizeInChars(FD->getType());
181 auto FieldOffsetBits = RL.getFieldOffset(FD->getFieldIndex());
182 CharUnits FieldOffset = ASTContext.toCharUnitsFromBits(FieldOffsetBits);
183 PaddingSum += (FieldOffset - Offset);
184 Offset = FieldOffset + FieldSize;
186 PaddingSum += RL.getSize() - Offset;
190 /// Optimal padding overview:
191 /// 1. Find a close approximation to where we can place our first field.
192 /// This will usually be at offset 0.
193 /// 2. Try to find the best field that can legally be placed at the current
195 /// a. "Best" is the largest alignment that is legal, but smallest size.
196 /// This is to account for overly aligned types.
197 /// 3. If no fields can fit, pad by rounding the current offset up to the
198 /// smallest alignment requirement of our fields. Measure and track the
199 // amount of padding added. Go back to 2.
200 /// 4. Increment the current offset by the size of the chosen field.
201 /// 5. Remove the chosen field from the set of future possibilities.
202 /// 6. Go back to 2 if there are still unplaced fields.
203 /// 7. Add tail padding by rounding the current offset up to the structure
204 /// alignment. Track the amount of padding added.
206 static std::pair<CharUnits, SmallVector<const FieldDecl *, 20>>
207 calculateOptimalPad(const RecordDecl *RD, const ASTContext &ASTContext,
208 const ASTRecordLayout &RL) {
212 const FieldDecl *Field;
213 bool operator<(const FieldInfo &RHS) const {
214 // Order from small alignments to large alignments,
215 // then large sizes to small sizes.
216 // then large field indices to small field indices
217 return std::make_tuple(Align, -Size,
218 Field ? -static_cast<int>(Field->getFieldIndex())
221 RHS.Align, -RHS.Size,
222 RHS.Field ? -static_cast<int>(RHS.Field->getFieldIndex())
226 SmallVector<FieldInfo, 20> Fields;
227 auto GatherSizesAndAlignments = [](const FieldDecl *FD) {
230 auto &Ctx = FD->getASTContext();
231 std::tie(RetVal.Size, RetVal.Align) =
232 Ctx.getTypeInfoInChars(FD->getType());
233 assert(llvm::isPowerOf2_64(RetVal.Align.getQuantity()));
234 if (auto Max = FD->getMaxAlignment())
235 RetVal.Align = std::max(Ctx.toCharUnitsFromBits(Max), RetVal.Align);
238 std::transform(RD->field_begin(), RD->field_end(),
239 std::back_inserter(Fields), GatherSizesAndAlignments);
240 std::sort(Fields.begin(), Fields.end());
241 // This lets us skip over vptrs and non-virtual bases,
242 // so that we can just worry about the fields in our object.
243 // Note that this does cause us to miss some cases where we
244 // could pack more bytes in to a base class's tail padding.
245 CharUnits NewOffset = ASTContext.toCharUnitsFromBits(RL.getFieldOffset(0));
247 SmallVector<const FieldDecl *, 20> OptimalFieldsOrder;
248 while (!Fields.empty()) {
249 unsigned TrailingZeros =
250 llvm::countTrailingZeros((unsigned long long)NewOffset.getQuantity());
251 // If NewOffset is zero, then countTrailingZeros will be 64. Shifting
252 // 64 will overflow our unsigned long long. Shifting 63 will turn
253 // our long long (and CharUnits internal type) negative. So shift 62.
254 long long CurAlignmentBits = 1ull << (std::min)(TrailingZeros, 62u);
255 CharUnits CurAlignment = CharUnits::fromQuantity(CurAlignmentBits);
256 FieldInfo InsertPoint = {CurAlignment, CharUnits::Zero(), nullptr};
257 auto CurBegin = Fields.begin();
258 auto CurEnd = Fields.end();
260 // In the typical case, this will find the last element
261 // of the vector. We won't find a middle element unless
262 // we started on a poorly aligned address or have an overly
264 auto Iter = std::upper_bound(CurBegin, CurEnd, InsertPoint);
265 if (Iter != CurBegin) {
266 // We found a field that we can layout with the current alignment.
268 NewOffset += Iter->Size;
269 OptimalFieldsOrder.push_back(Iter->Field);
272 // We are poorly aligned, and we need to pad in order to layout another
273 // field. Round up to at least the smallest field alignment that we
275 CharUnits NextOffset = NewOffset.alignTo(Fields[0].Align);
276 NewPad += NextOffset - NewOffset;
277 NewOffset = NextOffset;
280 // Calculate tail padding.
281 CharUnits NewSize = NewOffset.alignTo(RL.getAlignment());
282 NewPad += NewSize - NewOffset;
283 return {NewPad, std::move(OptimalFieldsOrder)};
287 const RecordDecl *RD, CharUnits BaselinePad, CharUnits OptimalPad,
288 const SmallVector<const FieldDecl *, 20> &OptimalFieldsOrder) const {
291 llvm::make_unique<BugType>(this, "Excessive Padding", "Performance");
293 SmallString<100> Buf;
294 llvm::raw_svector_ostream Os(Buf);
295 Os << "Excessive padding in '";
296 Os << QualType::getAsString(RD->getTypeForDecl(), Qualifiers(),
300 if (auto *TSD = dyn_cast<ClassTemplateSpecializationDecl>(RD)) {
301 // TODO: make this show up better in the console output and in
302 // the HTML. Maybe just make it show up in HTML like the path
304 SourceLocation ILoc = TSD->getPointOfInstantiation();
306 Os << " instantiated here: "
307 << ILoc.printToString(BR->getSourceManager());
310 Os << " (" << BaselinePad.getQuantity() << " padding bytes, where "
311 << OptimalPad.getQuantity() << " is optimal). \n"
312 << "Optimal fields order: \n";
313 for (const auto *FD : OptimalFieldsOrder)
314 Os << FD->getName() << ", \n";
315 Os << "consider reordering the fields or adding explicit padding "
318 PathDiagnosticLocation CELoc =
319 PathDiagnosticLocation::create(RD, BR->getSourceManager());
320 auto Report = llvm::make_unique<BugReport>(*PaddingBug, Os.str(), CELoc);
321 Report->setDeclWithIssue(RD);
322 Report->addRange(RD->getSourceRange());
323 BR->emitReport(std::move(Report));
328 void ento::registerPaddingChecker(CheckerManager &Mgr) {
329 Mgr.registerChecker<PaddingChecker>();