//===-- ImplicitNullChecks.cpp - Fold null checks into memory accesses ----===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass turns explicit null checks of the form
//
//   test %r10, %r10
//   je throw_npe
//   movl (%r10), %esi
//   ...
//
// into
//
//   faulting_load_op("movl (%r10), %esi", throw_npe)
//   ...
//
// With the help of a runtime that understands the .fault_maps section,
// faulting_load_op branches to throw_npe if executing movl (%r10), %esi incurs
// a page fault.
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"

using namespace llvm;

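// Only memory accesses whose displacement from the checked pointer is smaller
// than this value are folded, so that a null base is guaranteed to fault in
// the null page (see the legality discussion in analyzeBlockForNullChecks).
// The default matches the common 4 KiB page size; targets with larger pages
// can override it on the command line.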
static cl::opt<int> PageSize("imp-null-check-page-size",
                             cl::desc("The page size of the target in bytes"),
                             cl::init(4096));

#define DEBUG_TYPE "implicit-null-checks"

STATISTIC(NumImplicitNullChecks,
          "Number of explicit null checks made implicit");

namespace {

class ImplicitNullChecks : public MachineFunctionPass {
  /// Represents one null check that can be made implicit.
  class NullCheck {
    // The memory operation the null check can be folded into.
    MachineInstr *MemOperation;

    // The instruction actually doing the null check (Ptr != 0).
    MachineInstr *CheckOperation;

    // The block the check resides in.
    MachineBasicBlock *CheckBlock;

    // The block branched to if the pointer is non-null.
    MachineBasicBlock *NotNullSucc;

    // The block branched to if the pointer is null.
    MachineBasicBlock *NullSucc;

    // If this is non-null, then MemOperation has a dependency on this
    // instruction, and it needs to be hoisted to execute before MemOperation.
    MachineInstr *OnlyDependency;

  public:
    explicit NullCheck(MachineInstr *memOperation, MachineInstr *checkOperation,
                       MachineBasicBlock *checkBlock,
                       MachineBasicBlock *notNullSucc,
                       MachineBasicBlock *nullSucc,
                       MachineInstr *onlyDependency)
        : MemOperation(memOperation), CheckOperation(checkOperation),
          CheckBlock(checkBlock), NotNullSucc(notNullSucc), NullSucc(nullSucc),
          OnlyDependency(onlyDependency) {}

    MachineInstr *getMemOperation() const { return MemOperation; }

    MachineInstr *getCheckOperation() const { return CheckOperation; }

    MachineBasicBlock *getCheckBlock() const { return CheckBlock; }

    MachineBasicBlock *getNotNullSucc() const { return NotNullSucc; }

    MachineBasicBlock *getNullSucc() const { return NullSucc; }

    MachineInstr *getOnlyDependency() const { return OnlyDependency; }
  };

  const TargetInstrInfo *TII = nullptr;
  const TargetRegisterInfo *TRI = nullptr;
  AliasAnalysis *AA = nullptr;
  MachineModuleInfo *MMI = nullptr;

  bool analyzeBlockForNullChecks(MachineBasicBlock &MBB,
                                 SmallVectorImpl<NullCheck> &NullCheckList);
  MachineInstr *insertFaultingLoad(MachineInstr *LoadMI, MachineBasicBlock *MBB,
                                   MachineBasicBlock *HandlerMBB);
  void rewriteNullChecks(ArrayRef<NullCheck> NullCheckList);

public:
  static char ID;

  ImplicitNullChecks() : MachineFunctionPass(ID) {
    initializeImplicitNullChecksPass(*PassRegistry::getPassRegistry());
  }

  bool runOnMachineFunction(MachineFunction &MF) override;
  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AAResultsWrapperPass>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }
};

/// \brief Detect re-ordering hazards and dependencies.
///
/// This class keeps track of defs and uses, and can be queried to determine
/// whether a given machine instruction can be re-ordered from after the
/// machine instructions seen so far to before them.
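///
/// A sketch of the intended driving loop (mirroring how
/// analyzeBlockForNullChecks below uses this class; names are illustrative):
///
///   HazardDetector HD(*TRI, *AA);
///   for (MachineInstr &MI : *MBB) {
///     MachineInstr *Dependency = nullptr;
///     if (HD.isSafeToHoist(&MI, Dependency)) {
///       // MI (together with Dependency, if any) may be moved above all of
///       // the instructions remembered so far.
///     }
///     HD.rememberInstruction(&MI);
///     if (HD.isClobbered())
///       break; // Further queries would only get conservative answers.
///   }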
class HazardDetector {
  static MachineInstr *getUnknownMI() {
    return DenseMapInfo<MachineInstr *>::getTombstoneKey();
  }

  // Maps physical registers to the instruction defining them.  If there has
  // been more than one def of a specific register, that register is mapped to
  // getUnknownMI().
  DenseMap<unsigned, MachineInstr *> RegDefs;
  DenseSet<unsigned> RegUses;
  const TargetRegisterInfo &TRI;
  bool hasSeenClobber;
  AliasAnalysis &AA;

public:
  explicit HazardDetector(const TargetRegisterInfo &TRI, AliasAnalysis &AA)
      : TRI(TRI), hasSeenClobber(false), AA(AA) {}

  /// \brief Make a note of \p MI for later queries to isSafeToHoist.
  ///
  /// May clobber this HazardDetector instance.  \see isClobbered.
  void rememberInstruction(MachineInstr *MI);

  /// \brief Return true if it is safe to hoist \p MI from after all the
  /// instructions seen so far (via rememberInstruction) to before them.  If
  /// \p MI has exactly one transitive dependency, set \p Dependency to that
  /// instruction.  If there is more than one dependency, return false.
  bool isSafeToHoist(MachineInstr *MI, MachineInstr *&Dependency);

  /// \brief Return true if this instance of HazardDetector has been clobbered
  /// (i.e. has no more useful information).
  ///
  /// A HazardDetector is clobbered when it sees a construct it cannot
  /// understand, and it would have to return a conservative answer for all
  /// future queries.  Having a separate clobbered state lets the client code
  /// bail out early, without making queries about all of the future
  /// instructions (which would have returned the most conservative answer
  /// anyway).
  ///
  /// Calling rememberInstruction or isSafeToHoist on a clobbered HazardDetector
  /// is an error.
  bool isClobbered() { return hasSeenClobber; }
};
} // end anonymous namespace

void HazardDetector::rememberInstruction(MachineInstr *MI) {
  assert(!isClobbered() &&
         "Don't add instructions to a clobbered hazard detector");

  // There may be readonly calls that we can handle in theory, but for
  // now we don't bother since we don't handle callee-clobbered
  // registers.
  if (MI->isCall() || MI->mayStore() || MI->hasUnmodeledSideEffects()) {
    hasSeenClobber = true;
    return;
  }

  for (auto *MMO : MI->memoperands()) {
    // Right now we don't want to worry about LLVM's memory model.
    if (!MMO->isUnordered()) {
      hasSeenClobber = true;
      return;
    }
  }

  for (auto &MO : MI->operands()) {
    if (!MO.isReg() || !MO.getReg())
      continue;

    if (MO.isDef()) {
      auto It = RegDefs.find(MO.getReg());
      if (It == RegDefs.end())
        RegDefs.insert({MO.getReg(), MI});
      else {
        assert(It->second && "Found null MI?");
        It->second = getUnknownMI();
      }
    } else
      RegUses.insert(MO.getReg());
  }
}

bool HazardDetector::isSafeToHoist(MachineInstr *MI,
                                   MachineInstr *&Dependency) {
  assert(!isClobbered() && "isSafeToHoist cannot do anything useful!");
  Dependency = nullptr;

  // Right now we don't want to worry about LLVM's memory model.  This can be
  // made more precise later.
  for (auto *MMO : MI->memoperands())
    if (!MMO->isUnordered())
      return false;

  for (auto &MO : MI->operands()) {
    if (MO.isReg() && MO.getReg()) {
      for (auto &RegDef : RegDefs) {
        unsigned Reg = RegDef.first;
        MachineInstr *MI = RegDef.second;
        if (!TRI.regsOverlap(Reg, MO.getReg()))
          continue;

        // We found a write-after-write or read-after-write dependency; see if
        // the instruction causing it can be hoisted too.

        if (MI == getUnknownMI())
          // We don't have precise dependency information.
          return false;

        if (Dependency) {
          if (Dependency == MI)
            continue;
          // We already have one dependency, and we can track only one.
          return false;
        }

        // Now check if MI is actually a dependency that can be hoisted.

        // We don't want to track transitive dependencies.  We already know that
        // MI is the only instruction that defines Reg, but we need to be sure
        // that it does not use any registers that have been defined (trivially
        // checked below by ensuring that there are no register uses), and that
        // it is the only def for every register it defines (otherwise we could
        // introduce a write-after-write hazard).
        auto IsMIOperandSafe = [&](MachineOperand &MO) {
          if (!MO.isReg() || !MO.getReg())
            return true;
          if (MO.isUse())
            return false;
          assert(MO.isDef() &&
                 "Register MachineOperands must either be uses or be defs.");
          assert(RegDefs.count(MO.getReg()) &&
                 "All defs must be tracked in RegDefs by now!");

          for (unsigned Reg : RegUses)
            if (TRI.regsOverlap(Reg, MO.getReg()))
              return false; // We found a write-after-read

          for (auto &OtherDef : RegDefs) {
            unsigned OtherReg = OtherDef.first;
            MachineInstr *OtherMI = OtherDef.second;
            if (OtherMI != MI && TRI.regsOverlap(OtherReg, MO.getReg()))
              return false;
          }

          return true;
        };

        if (!all_of(MI->operands(), IsMIOperandSafe))
          return false;

        // Now check for speculation safety:
        bool SawStore = true;
        if (!MI->isSafeToMove(&AA, SawStore) || MI->mayLoad())
          return false;

        Dependency = MI;
      }

      if (MO.isDef())
        for (unsigned Reg : RegUses)
          if (TRI.regsOverlap(Reg, MO.getReg()))
            return false; // We found a write-after-read
    }
  }

  return true;
}

bool ImplicitNullChecks::runOnMachineFunction(MachineFunction &MF) {
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getRegInfo().getTargetRegisterInfo();
  MMI = &MF.getMMI();
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();

  SmallVector<NullCheck, 16> NullCheckList;

  for (auto &MBB : MF)
    analyzeBlockForNullChecks(MBB, NullCheckList);

  if (!NullCheckList.empty())
    rewriteNullChecks(NullCheckList);

  return !NullCheckList.empty();
}

// Return true if any register aliasing \p Reg is a live-in of \p MBB.
static bool AnyAliasLiveIn(const TargetRegisterInfo *TRI,
                           MachineBasicBlock *MBB, unsigned Reg) {
  for (MCRegAliasIterator AR(Reg, TRI, /*IncludeSelf*/ true); AR.isValid();
       ++AR)
    if (MBB->isLiveIn(*AR))
      return true;
  return false;
}

/// Analyze MBB to check if its terminating branch can be turned into an
/// implicit null check.  If so, append a description of the null check to
/// NullCheckList and return true; otherwise return false.
bool ImplicitNullChecks::analyzeBlockForNullChecks(
    MachineBasicBlock &MBB, SmallVectorImpl<NullCheck> &NullCheckList) {
  typedef TargetInstrInfo::MachineBranchPredicate MachineBranchPredicate;

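  // Bail out unless the IR-level terminator for this block carries
  // !make.implicit metadata (LLVMContext::MD_make_implicit).  An illustrative
  // IR snippet that opts a branch into this transformation:
  //
  //   %is.null = icmp eq i32* %ptr, null
  //   br i1 %is.null, label %is_null, label %not_null, !make.implicit !0
  //
  // with !0 = !{}.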
  MDNode *BranchMD = nullptr;
  if (auto *BB = MBB.getBasicBlock())
    BranchMD = BB->getTerminator()->getMetadata(LLVMContext::MD_make_implicit);

  if (!BranchMD)
    return false;

  MachineBranchPredicate MBP;

  if (TII->analyzeBranchPredicate(MBB, MBP, true))
    return false;

  // Is the predicate comparing an integer to zero?
  if (!(MBP.LHS.isReg() && MBP.RHS.isImm() && MBP.RHS.getImm() == 0 &&
        (MBP.Predicate == MachineBranchPredicate::PRED_NE ||
         MBP.Predicate == MachineBranchPredicate::PRED_EQ)))
    return false;

  // If we cannot erase the test instruction itself, then making the null check
  // implicit does not buy us much.
  if (!MBP.SingleUseCondition)
    return false;

  MachineBasicBlock *NotNullSucc, *NullSucc;

  if (MBP.Predicate == MachineBranchPredicate::PRED_NE) {
    NotNullSucc = MBP.TrueDest;
    NullSucc = MBP.FalseDest;
  } else {
    NotNullSucc = MBP.FalseDest;
    NullSucc = MBP.TrueDest;
  }

  // We handle the simplest case for now.  We can potentially do better by using
  // the machine dominator tree.
  if (NotNullSucc->pred_size() != 1)
    return false;

  // Starting with a code fragment like:
  //
  //   test %RAX, %RAX
  //   jne LblNotNull
  //
  //  LblNull:
  //   callq throw_NullPointerException
  //
  //  LblNotNull:
  //   Inst0
  //   Inst1
  //   ...
  //   Def = Load (%RAX + <offset>)
  //   ...
  //
  //
  // we want to end up with
  //
  //   Def = FaultingLoad (%RAX + <offset>), LblNull
  //   jmp LblNotNull ;; explicit or fallthrough
  //
  //  LblNotNull:
  //   Inst0
  //   Inst1
  //   ...
  //
  //  LblNull:
  //   callq throw_NullPointerException
  //
  //
  // To see why this is legal, consider the two possibilities:
  //
  //  1. %RAX is null: since we constrain <offset> to be less than PageSize, the
  //     load instruction dereferences the null page, causing a segmentation
  //     fault.
  //
  //  2. %RAX is not null: in this case we know that the load cannot fault, as
  //     otherwise the load would've faulted in the original program too and the
  //     original program would've been undefined.
  //
  // This reasoning cannot be extended to justify hoisting through arbitrary
  // control flow.  For instance, in the example below (in pseudo-C)
  //
  //    if (ptr == null) { throw_npe(); unreachable; }
  //    if (some_cond) { return 42; }
  //    v = ptr->field;  // LD
  //    ...
  //
  // we cannot (without code duplication) use the load marked "LD" to null check
  // ptr -- clause (2) above does not apply in this case.  In the above program
  // the safety of ptr->field can be dependent on some_cond; and, for instance,
  // ptr could be some non-null invalid reference that never gets loaded from
  // because some_cond is always true.

  unsigned PointerReg = MBP.LHS.getReg();

  HazardDetector HD(*TRI, *AA);

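  // Scan NotNullSucc for the first instruction that can serve as the implicit
  // null check: a non-predicated load whose base register is PointerReg, whose
  // displacement is below PageSize (so a null base is guaranteed to fault),
  // that defines at most one register, and that the HazardDetector says can be
  // hoisted above everything before it in the block (possibly together with a
  // single dependency, which is validated further below).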
  for (auto MII = NotNullSucc->begin(), MIE = NotNullSucc->end(); MII != MIE;
       ++MII) {
    MachineInstr &MI = *MII;
    unsigned BaseReg;
    int64_t Offset;
    MachineInstr *Dependency = nullptr;
    if (TII->getMemOpBaseRegImmOfs(MI, BaseReg, Offset, TRI))
      if (MI.mayLoad() && !MI.isPredicable() && BaseReg == PointerReg &&
          Offset < PageSize && MI.getDesc().getNumDefs() <= 1 &&
          HD.isSafeToHoist(&MI, Dependency)) {

        auto DependencyOperandIsOk = [&](MachineOperand &MO) {
          assert(!(MO.isReg() && MO.isUse()) &&
                 "No transitive dependencies please!");
          if (!MO.isReg() || !MO.getReg() || !MO.isDef())
            return true;

          // Make sure that we won't clobber any live-ins of the sibling block
          // by hoisting Dependency.  For instance, we can't hoist INST to
          // before the null check (even if it is safe, and does not violate
          // any dependencies in the non_null_block) if %rdx is live-in to
          // _null_block.
          //
          //    test %rcx, %rcx
          //    je _null_block
          //  _non_null_block:
          //    %rdx<def> = INST
          //    ...
          if (AnyAliasLiveIn(TRI, NullSucc, MO.getReg()))
            return false;

          // Make sure Dependency isn't re-defining the base register.
          // Otherwise we won't get the memory operation on the address we
          // want.
          if (TRI->regsOverlap(MO.getReg(), BaseReg))
            return false;

          return true;
        };

        bool DependencyOperandsAreOk =
            !Dependency ||
            all_of(Dependency->operands(), DependencyOperandIsOk);

        if (DependencyOperandsAreOk) {
          NullCheckList.emplace_back(&MI, MBP.ConditionDef, &MBB, NotNullSucc,
                                     NullSucc, Dependency);
          return true;
        }
      }

    HD.rememberInstruction(&MI);
    if (HD.isClobbered())
      return false;
  }

  return false;
}

/// Wrap a machine load instruction, LoadMI, into a FAULTING_LOAD_OP machine
/// instruction.  The FAULTING_LOAD_OP instruction does the same load as LoadMI
/// (defining the same register), and branches to HandlerMBB if the load
/// faults.  The FAULTING_LOAD_OP instruction is inserted at the end of MBB.
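///
/// The emitted instruction has the shape (operand order as built below):
///
///   <def, if any> = FAULTING_LOAD_OP <HandlerMBB>, <LoadMI's opcode>,
///                                    <LoadMI's remaining operands...>
///
/// and it inherits LoadMI's memory operands, so later passes still see the
/// same memory access.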
MachineInstr *
ImplicitNullChecks::insertFaultingLoad(MachineInstr *LoadMI,
                                       MachineBasicBlock *MBB,
                                       MachineBasicBlock *HandlerMBB) {
  const unsigned NoRegister = 0; // Guaranteed to be the NoRegister value for
                                 // all targets.

  DebugLoc DL;
  unsigned NumDefs = LoadMI->getDesc().getNumDefs();
  assert(NumDefs <= 1 && "other cases unhandled!");

  unsigned DefReg = NoRegister;
  if (NumDefs != 0) {
    DefReg = LoadMI->defs().begin()->getReg();
    assert(std::distance(LoadMI->defs().begin(), LoadMI->defs().end()) == 1 &&
           "expected exactly one def!");
  }

  auto MIB = BuildMI(MBB, DL, TII->get(TargetOpcode::FAULTING_LOAD_OP), DefReg)
                 .addMBB(HandlerMBB)
                 .addImm(LoadMI->getOpcode());

  for (auto &MO : LoadMI->uses())
    MIB.addOperand(MO);

  MIB.setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end());

  return MIB;
}

/// Rewrite the null checks in NullCheckList into implicit null checks.
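///
/// For each recorded check, this (1) deletes the conditional branch in the
/// check block, (2) hoists the single recorded dependency, if any, into the
/// check block, (3) inserts a FAULTING_LOAD_OP in place of the branch that
/// jumps to the null successor if the load faults, (4) updates the live-in
/// lists of the affected blocks, (5) erases the original load and the test
/// instruction, and (6) ends the check block with an unconditional branch to
/// the not-null successor.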
void ImplicitNullChecks::rewriteNullChecks(
    ArrayRef<ImplicitNullChecks::NullCheck> NullCheckList) {
  DebugLoc DL;

  for (auto &NC : NullCheckList) {
    // Remove the conditional branch dependent on the null check.
    unsigned BranchesRemoved = TII->removeBranch(*NC.getCheckBlock());
    (void)BranchesRemoved;
    assert(BranchesRemoved > 0 && "expected at least one branch!");

    if (auto *DepMI = NC.getOnlyDependency()) {
      DepMI->removeFromParent();
      NC.getCheckBlock()->insert(NC.getCheckBlock()->end(), DepMI);
    }

    // Insert a faulting load where the conditional branch was originally.  The
    // check we did earlier ensures that this bit of code motion is legal.  We
    // do not touch the successors list for any basic block since we haven't
    // changed control flow; we've just made it implicit.
    MachineInstr *FaultingLoad = insertFaultingLoad(
        NC.getMemOperation(), NC.getCheckBlock(), NC.getNullSucc());
    // The values defined by MemOperation, if any, are now live-ins of
    // MemOperation's block.  Note that the original load may define
    // implicit-defs alongside the loaded value.
    MachineBasicBlock *MBB = NC.getMemOperation()->getParent();
    for (const MachineOperand &MO : FaultingLoad->operands()) {
      if (!MO.isReg() || !MO.isDef())
        continue;
      unsigned Reg = MO.getReg();
      if (!Reg || MBB->isLiveIn(Reg))
        continue;
      MBB->addLiveIn(Reg);
    }

    if (auto *DepMI = NC.getOnlyDependency()) {
      for (auto &MO : DepMI->operands()) {
        if (!MO.isReg() || !MO.getReg() || !MO.isDef())
          continue;
        if (!NC.getNotNullSucc()->isLiveIn(MO.getReg()))
          NC.getNotNullSucc()->addLiveIn(MO.getReg());
      }
    }

    NC.getMemOperation()->eraseFromParent();
    NC.getCheckOperation()->eraseFromParent();

    // Insert an *unconditional* branch to the not-null successor.
    TII->insertBranch(*NC.getCheckBlock(), NC.getNotNullSucc(), nullptr,
                      /*Cond=*/None, DL);

    NumImplicitNullChecks++;
  }
}

char ImplicitNullChecks::ID = 0;
char &llvm::ImplicitNullChecksID = ImplicitNullChecks::ID;
INITIALIZE_PASS_BEGIN(ImplicitNullChecks, "implicit-null-checks",
                      "Implicit null checks", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_END(ImplicitNullChecks, "implicit-null-checks",
                    "Implicit null checks", false, false)