//=== StackAddrEscapeChecker.cpp ----------------------------------*- C++ -*--//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the stack address leak checker, which checks whether an
// invalid stack address is stored into a global or heap location. See CERT
// DCL30-C.
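//
// A minimal sketch of the kind of code this checker flags (the identifiers
// below are illustrative, not part of the checker):
//
//   int *Global;
//   void store() {
//     int Local = 0;
//     Global = &Local; // 'Local' is dead once store() returns.
//   }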
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h"
#include "clang/AST/ExprCXX.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/raw_ostream.h"

using namespace clang;
using namespace ento;

namespace {
class StackAddrEscapeChecker
    : public Checker<check::PreCall, check::PreStmt<ReturnStmt>,
                     check::EndFunction> {
  // Lazily initialized in isSemaphoreCaptured(); must start out null.
  mutable IdentifierInfo *dispatch_semaphore_tII = nullptr;
  mutable std::unique_ptr<BuiltinBug> BT_stackleak;
  mutable std::unique_ptr<BuiltinBug> BT_returnstack;
  mutable std::unique_ptr<BuiltinBug> BT_capturedstackasync;
  mutable std::unique_ptr<BuiltinBug> BT_capturedstackret;

public:
  enum CheckKind {
    CK_StackAddrEscapeChecker,
    CK_StackAddrAsyncEscapeChecker,
    CK_NumCheckKinds
  };

  DefaultBool ChecksEnabled[CK_NumCheckKinds];

  void checkPreCall(const CallEvent &Call, CheckerContext &C) const;
  void checkPreStmt(const ReturnStmt *RS, CheckerContext &C) const;
  void checkEndFunction(const ReturnStmt *RS, CheckerContext &Ctx) const;

private:
  void checkReturnedBlockCaptures(const BlockDataRegion &B,
                                  CheckerContext &C) const;
  void checkAsyncExecutedBlockCaptures(const BlockDataRegion &B,
                                       CheckerContext &C) const;
  void EmitStackError(CheckerContext &C, const MemRegion *R,
                      const Expr *RetE) const;
  bool isSemaphoreCaptured(const BlockDecl &B) const;
  static SourceRange genName(raw_ostream &os, const MemRegion *R,
                             ASTContext &Ctx);
  static SmallVector<const MemRegion *, 4>
  getCapturedStackRegions(const BlockDataRegion &B, CheckerContext &C);
  static bool isArcManagedBlock(const MemRegion *R, CheckerContext &C);
  static bool isNotInCurrentFrame(const MemRegion *R, CheckerContext &C);
};
} // namespace

SourceRange StackAddrEscapeChecker::genName(raw_ostream &os, const MemRegion *R,
                                            ASTContext &Ctx) {
  // Get the base region, stripping away fields and elements.
  R = R->getBaseRegion();
  SourceManager &SM = Ctx.getSourceManager();
  SourceRange range;
  os << "Address of ";

  // Check if the region is a compound literal.
  if (const auto *CR = dyn_cast<CompoundLiteralRegion>(R)) {
    const CompoundLiteralExpr *CL = CR->getLiteralExpr();
    os << "stack memory associated with a compound literal "
          "declared on line "
       << SM.getExpansionLineNumber(CL->getBeginLoc());
    range = CL->getSourceRange();
  } else if (const auto *AR = dyn_cast<AllocaRegion>(R)) {
    const Expr *ARE = AR->getExpr();
    SourceLocation L = ARE->getBeginLoc();
    range = ARE->getSourceRange();
    os << "stack memory allocated by call to alloca() on line "
       << SM.getExpansionLineNumber(L);
  } else if (const auto *BR = dyn_cast<BlockDataRegion>(R)) {
    const BlockDecl *BD = BR->getCodeRegion()->getDecl();
    SourceLocation L = BD->getBeginLoc();
    range = BD->getSourceRange();
    os << "stack-allocated block declared on line "
       << SM.getExpansionLineNumber(L);
  } else if (const auto *VR = dyn_cast<VarRegion>(R)) {
    os << "stack memory associated with local variable '" << VR->getString()
       << '\'';
    range = VR->getDecl()->getSourceRange();
  } else if (const auto *TOR = dyn_cast<CXXTempObjectRegion>(R)) {
    QualType Ty = TOR->getValueType().getLocalUnqualifiedType();
    os << "stack memory associated with temporary object of type '";
    Ty.print(os, Ctx.getPrintingPolicy());
    os << "'";
    range = TOR->getExpr()->getSourceRange();
  } else {
    llvm_unreachable("Invalid region in StackAddrEscapeChecker.");
  }

  return range;
}

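// Under ARC, a block that escapes is automatically copied to the heap, so a
// stack-allocated block region is not, by itself, treated as a leak of stack
// memory in ARC mode.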
bool StackAddrEscapeChecker::isArcManagedBlock(const MemRegion *R,
                                               CheckerContext &C) {
  assert(R && "MemRegion should not be null");
  return C.getASTContext().getLangOpts().ObjCAutoRefCount &&
         isa<BlockDataRegion>(R);
}

bool StackAddrEscapeChecker::isNotInCurrentFrame(const MemRegion *R,
                                                 CheckerContext &C) {
  const StackSpaceRegion *S = cast<StackSpaceRegion>(R->getMemorySpace());
  return S->getStackFrame() != C.getStackFrame();
}

bool StackAddrEscapeChecker::isSemaphoreCaptured(const BlockDecl &B) const {
  if (!dispatch_semaphore_tII)
    dispatch_semaphore_tII =
        &B.getASTContext().Idents.get("dispatch_semaphore_t");
  for (const auto &C : B.captures()) {
    const auto *T = C.getVariable()->getType()->getAs<TypedefType>();
    if (T && T->getDecl()->getIdentifier() == dispatch_semaphore_tII)
      return true;
  }
  return false;
}

SmallVector<const MemRegion *, 4>
StackAddrEscapeChecker::getCapturedStackRegions(const BlockDataRegion &B,
                                                CheckerContext &C) {
  SmallVector<const MemRegion *, 4> Regions;
  BlockDataRegion::referenced_vars_iterator I = B.referenced_vars_begin();
  BlockDataRegion::referenced_vars_iterator E = B.referenced_vars_end();
  for (; I != E; ++I) {
    SVal Val = C.getState()->getSVal(I.getCapturedRegion());
    const MemRegion *Region = Val.getAsRegion();
    if (Region && isa<StackSpaceRegion>(Region->getMemorySpace()))
      Regions.push_back(Region);
  }
  return Regions;
}

void StackAddrEscapeChecker::EmitStackError(CheckerContext &C,
                                            const MemRegion *R,
                                            const Expr *RetE) const {
  ExplodedNode *N = C.generateNonFatalErrorNode();
  if (!N)
    return;
  if (!BT_returnstack)
    BT_returnstack = llvm::make_unique<BuiltinBug>(
        this, "Return of address to stack-allocated memory");
  // Generate a report for this bug.
  SmallString<128> buf;
  llvm::raw_svector_ostream os(buf);
  SourceRange range = genName(os, R, C.getASTContext());
  os << " returned to caller";
  auto report = llvm::make_unique<BugReport>(*BT_returnstack, os.str(), N);
  report->addRange(RetE->getSourceRange());
  if (range.isValid())
    report->addRange(range);
  C.emitReport(std::move(report));
}

void StackAddrEscapeChecker::checkAsyncExecutedBlockCaptures(
    const BlockDataRegion &B, CheckerContext &C) const {
  // There is a not-too-uncommon idiom in which a block passed to
  // dispatch_async captures a semaphore, and the thread that called
  // dispatch_async then blocks, waiting for the block to finish executing,
  // via dispatch_semaphore_wait. To avoid false positives (for now), we
  // ignore all blocks that capture a variable of type
  // "dispatch_semaphore_t".
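  //
  // For example, a sketch of the idiom using standard GCD calls ('Queue' and
  // the work done inside the block are illustrative):
  //
  //   dispatch_semaphore_t Sem = dispatch_semaphore_create(0);
  //   dispatch_async(Queue, ^{
  //     DoWorkWithStackAddresses();
  //     dispatch_semaphore_signal(Sem);
  //   });
  //   dispatch_semaphore_wait(Sem, DISPATCH_TIME_FOREVER);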
  if (isSemaphoreCaptured(*B.getDecl()))
    return;
  for (const MemRegion *Region : getCapturedStackRegions(B, C)) {
    // The block passed to dispatch_async may capture another block
    // created on the stack. However, there is no leak in this situation,
    // regardless of whether ARC is enabled: dispatch_async copies the
    // passed "outer" block (via Block_copy), and if that block has
    // captured another "inner" block, the "inner" block is copied as well.
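    //
    // For example (illustrative; 'Queue' is hypothetical):
    //   void (^Inner)(void) = ^{ /* may reference locals */ };
    //   dispatch_async(Queue, ^{ Inner(); }); // both blocks are heap-copied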
    if (isa<BlockDataRegion>(Region))
      continue;
    ExplodedNode *N = C.generateNonFatalErrorNode();
    if (!N)
      continue;
    if (!BT_capturedstackasync)
      BT_capturedstackasync = llvm::make_unique<BuiltinBug>(
          this, "Address of stack-allocated memory is captured");
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, Region, C.getASTContext());
    Out << " is captured by an asynchronously-executed block";
    auto Report =
        llvm::make_unique<BugReport>(*BT_capturedstackasync, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);
    C.emitReport(std::move(Report));
  }
}

void StackAddrEscapeChecker::checkReturnedBlockCaptures(
    const BlockDataRegion &B, CheckerContext &C) const {
  for (const MemRegion *Region : getCapturedStackRegions(B, C)) {
    if (isArcManagedBlock(Region, C) || isNotInCurrentFrame(Region, C))
      continue;
    ExplodedNode *N = C.generateNonFatalErrorNode();
    if (!N)
      continue;
    if (!BT_capturedstackret)
      BT_capturedstackret = llvm::make_unique<BuiltinBug>(
          this, "Address of stack-allocated memory is captured");
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, Region, C.getASTContext());
    Out << " is captured by a returned block";
    auto Report =
        llvm::make_unique<BugReport>(*BT_capturedstackret, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);
    C.emitReport(std::move(Report));
  }
}

void StackAddrEscapeChecker::checkPreCall(const CallEvent &Call,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrAsyncEscapeChecker])
    return;
  if (!Call.isGlobalCFunction("dispatch_after") &&
      !Call.isGlobalCFunction("dispatch_async"))
    return;
  for (unsigned Idx = 0, NumArgs = Call.getNumArgs(); Idx < NumArgs; ++Idx) {
    if (const BlockDataRegion *B = dyn_cast_or_null<BlockDataRegion>(
            Call.getArgSVal(Idx).getAsRegion()))
      checkAsyncExecutedBlockCaptures(*B, C);
  }
}

void StackAddrEscapeChecker::checkPreStmt(const ReturnStmt *RS,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;

  const Expr *RetE = RS->getRetValue();
  if (!RetE)
    return;
  RetE = RetE->IgnoreParens();

  SVal V = C.getSVal(RetE);
  const MemRegion *R = V.getAsRegion();
  if (!R)
    return;

  if (const BlockDataRegion *B = dyn_cast<BlockDataRegion>(R))
    checkReturnedBlockCaptures(*B, C);

  if (!isa<StackSpaceRegion>(R->getMemorySpace()) ||
      isNotInCurrentFrame(R, C) || isArcManagedBlock(R, C))
    return;

  // Returning a record by value is fine. (In this case, the returned
  // expression will be a call to a copy constructor, possibly wrapped in an
  // ExprWithCleanups node.)
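  //
  // For example (illustrative):
  //   struct S { int X; };
  //   S make() {
  //     S Local;
  //     return Local; // fine: the caller receives a copy, not an address
  //   }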
  if (const ExprWithCleanups *Cleanup = dyn_cast<ExprWithCleanups>(RetE))
    RetE = Cleanup->getSubExpr();
  if (isa<CXXConstructExpr>(RetE) && RetE->getType()->isRecordType())
    return;

  // The CK_CopyAndAutoreleaseBlockObject cast causes the block to be copied,
  // so the stack address does not escape here.
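  //
  // For example (an illustrative sketch; under ARC, returning a block literal
  // is one place Sema inserts this cast):
  //   typedef void (^Callback)(void);
  //   Callback make(int X) {
  //     return ^{ (void)X; }; // copied and autoreleased, not a stack escape
  //   }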
  if (auto *ICE = dyn_cast<ImplicitCastExpr>(RetE)) {
    if (isa<BlockDataRegion>(R) &&
        ICE->getCastKind() == CK_CopyAndAutoreleaseBlockObject) {
      return;
    }
  }

  EmitStackError(C, R, RetE);
}

void StackAddrEscapeChecker::checkEndFunction(const ReturnStmt *RS,
                                              CheckerContext &Ctx) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;

  ProgramStateRef State = Ctx.getState();

  // Iterate over all bindings to global variables and see if any of them
  // refer to a memory region in the stack space.
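  //
  // For example (illustrative):
  //   int *P;
  //   void f() {
  //     int X = 0;
  //     P = &X;
  //   } // P still refers to X's (now dead) stack slot after f() returns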
  class CallBack : public StoreManager::BindingsHandler {
  private:
    CheckerContext &Ctx;
    const StackFrameContext *CurSFC;

  public:
    SmallVector<std::pair<const MemRegion *, const MemRegion *>, 10> V;

    CallBack(CheckerContext &CC) : Ctx(CC), CurSFC(CC.getStackFrame()) {}

    bool HandleBinding(StoreManager &SMgr, Store S, const MemRegion *Region,
                       SVal Val) override {
      if (!isa<GlobalsSpaceRegion>(Region->getMemorySpace()))
        return true;
      const MemRegion *VR = Val.getAsRegion();
      if (VR && isa<StackSpaceRegion>(VR->getMemorySpace()) &&
          !isArcManagedBlock(VR, Ctx) && !isNotInCurrentFrame(VR, Ctx))
        V.emplace_back(Region, VR);
      return true;
    }
  };

  CallBack Cb(Ctx);
  State->getStateManager().getStoreManager().iterBindings(State->getStore(),
                                                          Cb);

  if (Cb.V.empty())
    return;

  // Generate an error node.
  ExplodedNode *N = Ctx.generateNonFatalErrorNode(State);
  if (!N)
    return;

  if (!BT_stackleak)
    BT_stackleak = llvm::make_unique<BuiltinBug>(
        this, "Stack address stored into global variable",
        "Stack address was saved into a global variable. "
        "This is dangerous because the address will become "
        "invalid after returning from the function");

  for (const auto &P : Cb.V) {
    // Generate a report for this bug.
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, P.second, Ctx.getASTContext());
    Out << " is still referred to by the ";
    if (isa<StaticGlobalSpaceRegion>(P.first->getMemorySpace()))
      Out << "static";
    else
      Out << "global";
    Out << " variable '";
    const VarRegion *VR = cast<VarRegion>(P.first->getBaseRegion());
    Out << *VR->getDecl()
        << "' upon returning to the caller. This will be a dangling reference";
    auto Report = llvm::make_unique<BugReport>(*BT_stackleak, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);

    Ctx.emitReport(std::move(Report));
  }
}

#define REGISTER_CHECKER(name)                                                 \
  void ento::register##name(CheckerManager &Mgr) {                             \
    StackAddrEscapeChecker *Chk =                                              \
        Mgr.registerChecker<StackAddrEscapeChecker>();                         \
    Chk->ChecksEnabled[StackAddrEscapeChecker::CK_##name] = true;              \
  }

REGISTER_CHECKER(StackAddrEscapeChecker)
REGISTER_CHECKER(StackAddrAsyncEscapeChecker)