//===- VarLocBasedImpl.cpp - Tracking Debug Value MIs with VarLoc class----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file VarLocBasedImpl.cpp
///
/// LiveDebugValues is an optimistic "available expressions" dataflow
/// algorithm. The set of expressions is the set of machine locations
/// (registers, spill slots, constants) in which a variable fragment might be
/// located, qualified by a DIExpression and indirectness flag, while each
/// variable is identified by a DebugVariable object. The availability of an
/// expression begins when a DBG_VALUE instruction specifies the location of a
/// DebugVariable, and continues until that location is clobbered or
/// re-specified by a different DBG_VALUE for the same DebugVariable.
///
/// The output of LiveDebugValues is additional DBG_VALUE instructions,
/// placed to extend variable locations as far as they're available. This file
/// and the VarLocBasedLDV class are an implementation that explicitly tracks
/// locations, using the VarLoc class.
///
/// The canonical "available expressions" problem doesn't have expression
/// clobbering; instead, when a variable is re-assigned, any expressions using
/// that variable get invalidated. LiveDebugValues can map onto "available
/// expressions" by having every register represented by a variable, which is
/// used in an expression that becomes available at a DBG_VALUE instruction.
/// When the register is clobbered, its variable is effectively reassigned, and
/// expressions computed from it become unavailable. A similar construct is
/// needed when a DebugVariable has its location re-specified, to invalidate
/// all other locations for that DebugVariable.
///
/// Using the dataflow analysis to compute the available expressions, we create
/// a DBG_VALUE at the beginning of each block where the expression is
/// live-in. This propagates variable locations into every basic block where
/// the location can be determined, rather than only having DBG_VALUEs in blocks
/// where locations are specified due to an assignment or some optimization.
/// Movements of values between registers and spill slots are annotated with
/// DBG_VALUEs too, to track variable values between locations. All this allows
/// DbgEntityHistoryCalculator to focus on only the locations within individual
/// blocks, facilitating testing and improving modularity.
///
/// We follow an optimistic dataflow approach, with this lattice:
///
/// \verbatim
///                    ┬ "Unknown"
///                    |
///                    v
///                   True
///                    |
///                    v
///                    ⊥ False
/// \endverbatim With "True" signifying that the expression is available (and
/// thus a DebugVariable's location is the corresponding register), while
/// "False" signifies that the expression is unavailable. "Unknown"s never
/// survive to the end of the analysis (see below).
///
/// Formally, all DebugVariable locations that are live-out of a block are
/// initialized to \top. A block's live-in values take the meet of the lattice
/// values of every predecessor's live-outs, except for the entry block, where
/// all live-ins are \bot. The usual dataflow propagation occurs: the transfer
/// function for a block assigns an expression for a DebugVariable to be "True"
/// if a DBG_VALUE in the block specifies it; "False" if the location is
/// clobbered; or the live-in value if it is unaffected by the block. We
/// visit each block in reverse post order until a fixed point is reached. The
/// solution produced is maximal.
///
/// Intuitively, we start by assuming that every expression / variable location
/// is at least "True", and then propagate "False" from the entry block and any
/// clobbers until there are no more changes to make. This gives us an accurate
/// solution because all incorrect locations will have a "False" propagated into
/// them. It also gives us a solution that copes well with loops by assuming
/// that variable locations are live-through every loop, and then removing those
/// that are not through dataflow.
///
/// Within LiveDebugValues: each variable location is represented by a
/// VarLoc object that identifies the source variable, the set of
/// machine-locations that currently describe it (a single location for
/// DBG_VALUE or multiple for DBG_VALUE_LIST), and the DBG_VALUE inst that
/// specifies the location. Each VarLoc is indexed in the (function-scope) \p
/// VarLocMap, giving each VarLoc a set of unique indices, each of which
/// corresponds to one of the VarLoc's machine-locations and can be used to
/// look up the VarLoc in the VarLocMap. Rather than operate directly on machine
/// locations, the dataflow analysis in this pass identifies locations by their
/// indices in the VarLocMap, meaning all the variable locations in a block can
/// be described by a sparse vector of VarLocMap indices.
///
/// All the storage for the dataflow analysis is local to the ExtendRanges
/// method and passed down to helper methods. "OutLocs" and "InLocs" record the
/// live-out and live-in lattice values for each block. "OpenRanges" maintains
/// a list of variable locations and, with the "process" method, evaluates the
/// transfer function of each block. "flushPendingLocs" installs debug value
/// instructions for each live-in location at the start of blocks, while
/// "Transfers" records transfers of values between machine-locations.
///
/// We avoid explicitly representing the "Unknown" (\top) lattice value in the
/// implementation. Instead, unvisited blocks implicitly have all lattice
/// values set as "Unknown". After being visited, there will be a path back to
/// the entry block where the lattice value is "False", and as the transfer
/// function cannot make new "Unknown" locations, there are no scenarios where
/// a block can have an "Unknown" location after being visited. Similarly, we
/// don't enumerate all possible variable locations before exploring the
/// function: when a new location is discovered, all blocks previously explored
/// were implicitly "False" but unrecorded, and become explicitly "False" when
/// a new VarLoc is created with its bit not set in predecessor InLocs or
/// OutLocs.
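///
/// As a purely illustrative sketch (the register name and block layout are
/// hypothetical, not taken from any real test), consider a variable "x" whose
/// location is specified only once:
///
/// \verbatim
///   bb.0:
///     DBG_VALUE $reg0, $noreg, "x", !DIExpression()
///     <fall through to bb.1>
///   bb.1:
///     ...                 ; no DBG_VALUE for "x" in this block
///     $reg0 = <some def>  ; clobbers the location of "x"
/// \endverbatim
///
/// The location {"x" in $reg0} is live-out of bb.0 and live-in to bb.1, so the
/// pass inserts a DBG_VALUE for "x" in $reg0 at the start of bb.1; the
/// clobbering def of $reg0 then ends the range within bb.1, where
/// DbgEntityHistoryCalculator can observe it locally.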
///
//===----------------------------------------------------------------------===//

#include "LiveDebugValues.h"

#include "llvm/ADT/CoalescingBitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/CodeGen/LexicalScopes.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Module.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/TypeSize.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <functional>
#include <map>
#include <queue>
#include <tuple>
#include <utility>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "livedebugvalues"

STATISTIC(NumInserted, "Number of DBG_VALUE instructions inserted");

/// Return true if \p Op is a register other than the stack pointer or frame
/// pointer, and false otherwise. This is used to avoid basing debug entry
/// values on the stack or frame pointer, since we do not support that at the
/// moment.
static bool isRegOtherThanSPAndFP(const MachineOperand &Op,
                                  const MachineInstr &MI,
                                  const TargetRegisterInfo *TRI) {
  if (!Op.isReg())
    return false;

  const MachineFunction *MF = MI.getParent()->getParent();
  const TargetLowering *TLI = MF->getSubtarget().getTargetLowering();
  Register SP = TLI->getStackPointerRegisterToSaveRestore();
  Register FP = TRI->getFrameRegister(*MF);
  Register Reg = Op.getReg();

  return Reg && Reg != SP && Reg != FP;
}

namespace {

// Max out the number of statically allocated elements in DefinedRegsSet, as
// this prevents fallback to std::set::count() operations.
using DefinedRegsSet = SmallSet<Register, 32>;

// The IDs in this set correspond to MachineLocs in VarLocs, as well as VarLocs
// that represent Entry Values; every VarLoc in the set will also appear
// exactly once at Location=0.
// As a result, each VarLoc may appear more than once in this "set", but each
// range corresponding to a Reg, SpillLoc, or EntryValue type will still be a
// "true" set (i.e.
each VarLoc may appear only once), and the range Location=0 200 // is the set of all VarLocs. 201 using VarLocSet = CoalescingBitVector<uint64_t>; 202 203 /// A type-checked pair of {Register Location (or 0), Index}, used to index 204 /// into a \ref VarLocMap. This can be efficiently converted to a 64-bit int 205 /// for insertion into a \ref VarLocSet, and efficiently converted back. The 206 /// type-checker helps ensure that the conversions aren't lossy. 207 /// 208 /// Why encode a location /into/ the VarLocMap index? This makes it possible 209 /// to find the open VarLocs killed by a register def very quickly. This is a 210 /// performance-critical operation for LiveDebugValues. 211 struct LocIndex { 212 using u32_location_t = uint32_t; 213 using u32_index_t = uint32_t; 214 215 u32_location_t Location; // Physical registers live in the range [1;2^30) (see 216 // \ref MCRegister), so we have plenty of range left 217 // here to encode non-register locations. 218 u32_index_t Index; 219 220 /// The location that has an entry for every VarLoc in the map. 221 static constexpr u32_location_t kUniversalLocation = 0; 222 223 /// The first location that is reserved for VarLocs with locations of kind 224 /// RegisterKind. 225 static constexpr u32_location_t kFirstRegLocation = 1; 226 227 /// The first location greater than 0 that is not reserved for VarLocs with 228 /// locations of kind RegisterKind. 229 static constexpr u32_location_t kFirstInvalidRegLocation = 1 << 30; 230 231 /// A special location reserved for VarLocs with locations of kind 232 /// SpillLocKind. 233 static constexpr u32_location_t kSpillLocation = kFirstInvalidRegLocation; 234 235 /// A special location reserved for VarLocs of kind EntryValueBackupKind and 236 /// EntryValueCopyBackupKind. 237 static constexpr u32_location_t kEntryValueBackupLocation = 238 kFirstInvalidRegLocation + 1; 239 240 LocIndex(u32_location_t Location, u32_index_t Index) 241 : Location(Location), Index(Index) {} 242 243 uint64_t getAsRawInteger() const { 244 return (static_cast<uint64_t>(Location) << 32) | Index; 245 } 246 247 template<typename IntT> static LocIndex fromRawInteger(IntT ID) { 248 static_assert(std::is_unsigned<IntT>::value && 249 sizeof(ID) == sizeof(uint64_t), 250 "Cannot convert raw integer to LocIndex"); 251 return {static_cast<u32_location_t>(ID >> 32), 252 static_cast<u32_index_t>(ID)}; 253 } 254 255 /// Get the start of the interval reserved for VarLocs of kind RegisterKind 256 /// which reside in \p Reg. The end is at rawIndexForReg(Reg+1)-1. 257 static uint64_t rawIndexForReg(Register Reg) { 258 return LocIndex(Reg, 0).getAsRawInteger(); 259 } 260 261 /// Return a range covering all set indices in the interval reserved for 262 /// \p Location in \p Set. 263 static auto indexRangeForLocation(const VarLocSet &Set, 264 u32_location_t Location) { 265 uint64_t Start = LocIndex(Location, 0).getAsRawInteger(); 266 uint64_t End = LocIndex(Location + 1, 0).getAsRawInteger(); 267 return Set.half_open_range(Start, End); 268 } 269 }; 270 271 // Simple Set for storing all the VarLoc Indices at a Location bucket. 272 using VarLocsInRange = SmallSet<LocIndex::u32_index_t, 32>; 273 // Vector of all `LocIndex`s for a given VarLoc; the same Location should not 274 // appear in any two of these, as each VarLoc appears at most once in any 275 // Location bucket. 
276 using LocIndices = SmallVector<LocIndex, 2>; 277 278 class VarLocBasedLDV : public LDVImpl { 279 private: 280 const TargetRegisterInfo *TRI; 281 const TargetInstrInfo *TII; 282 const TargetFrameLowering *TFI; 283 TargetPassConfig *TPC; 284 BitVector CalleeSavedRegs; 285 LexicalScopes LS; 286 VarLocSet::Allocator Alloc; 287 288 const MachineInstr *LastNonDbgMI; 289 290 enum struct TransferKind { TransferCopy, TransferSpill, TransferRestore }; 291 292 using FragmentInfo = DIExpression::FragmentInfo; 293 using OptFragmentInfo = Optional<DIExpression::FragmentInfo>; 294 295 /// A pair of debug variable and value location. 296 struct VarLoc { 297 // The location at which a spilled variable resides. It consists of a 298 // register and an offset. 299 struct SpillLoc { 300 unsigned SpillBase; 301 StackOffset SpillOffset; 302 bool operator==(const SpillLoc &Other) const { 303 return SpillBase == Other.SpillBase && SpillOffset == Other.SpillOffset; 304 } 305 bool operator!=(const SpillLoc &Other) const { 306 return !(*this == Other); 307 } 308 }; 309 310 /// Identity of the variable at this location. 311 const DebugVariable Var; 312 313 /// The expression applied to this location. 314 const DIExpression *Expr; 315 316 /// DBG_VALUE to clone var/expr information from if this location 317 /// is moved. 318 const MachineInstr &MI; 319 320 enum class MachineLocKind { 321 InvalidKind = 0, 322 RegisterKind, 323 SpillLocKind, 324 ImmediateKind 325 }; 326 327 enum class EntryValueLocKind { 328 NonEntryValueKind = 0, 329 EntryValueKind, 330 EntryValueBackupKind, 331 EntryValueCopyBackupKind 332 } EVKind; 333 334 /// The value location. Stored separately to avoid repeatedly 335 /// extracting it from MI. 336 union MachineLocValue { 337 uint64_t RegNo; 338 SpillLoc SpillLocation; 339 uint64_t Hash; 340 int64_t Immediate; 341 const ConstantFP *FPImm; 342 const ConstantInt *CImm; 343 MachineLocValue() : Hash(0) {} 344 }; 345 346 /// A single machine location; its Kind is either a register, spill 347 /// location, or immediate value. 348 /// If the VarLoc is not a NonEntryValueKind, then it will use only a 349 /// single MachineLoc of RegisterKind. 350 struct MachineLoc { 351 MachineLocKind Kind; 352 MachineLocValue Value; 353 bool operator==(const MachineLoc &Other) const { 354 if (Kind != Other.Kind) 355 return false; 356 switch (Kind) { 357 case MachineLocKind::SpillLocKind: 358 return Value.SpillLocation == Other.Value.SpillLocation; 359 case MachineLocKind::RegisterKind: 360 case MachineLocKind::ImmediateKind: 361 return Value.Hash == Other.Value.Hash; 362 default: 363 llvm_unreachable("Invalid kind"); 364 } 365 } 366 bool operator<(const MachineLoc &Other) const { 367 switch (Kind) { 368 case MachineLocKind::SpillLocKind: 369 return std::make_tuple( 370 Kind, Value.SpillLocation.SpillBase, 371 Value.SpillLocation.SpillOffset.getFixed(), 372 Value.SpillLocation.SpillOffset.getScalable()) < 373 std::make_tuple( 374 Other.Kind, Other.Value.SpillLocation.SpillBase, 375 Other.Value.SpillLocation.SpillOffset.getFixed(), 376 Other.Value.SpillLocation.SpillOffset.getScalable()); 377 case MachineLocKind::RegisterKind: 378 case MachineLocKind::ImmediateKind: 379 return std::tie(Kind, Value.Hash) < 380 std::tie(Other.Kind, Other.Value.Hash); 381 default: 382 llvm_unreachable("Invalid kind"); 383 } 384 } 385 }; 386 387 /// The set of machine locations used to determine the variable's value, in 388 /// conjunction with Expr. 
Initially populated with MI's debug operands, 389 /// but may be transformed independently afterwards. 390 SmallVector<MachineLoc, 8> Locs; 391 /// Used to map the index of each location in Locs back to the index of its 392 /// original debug operand in MI. Used when multiple location operands are 393 /// coalesced and the original MI's operands need to be accessed while 394 /// emitting a debug value. 395 SmallVector<unsigned, 8> OrigLocMap; 396 397 VarLoc(const MachineInstr &MI, LexicalScopes &LS) 398 : Var(MI.getDebugVariable(), MI.getDebugExpression(), 399 MI.getDebugLoc()->getInlinedAt()), 400 Expr(MI.getDebugExpression()), MI(MI), 401 EVKind(EntryValueLocKind::NonEntryValueKind) { 402 assert(MI.isDebugValue() && "not a DBG_VALUE"); 403 assert((MI.isDebugValueList() || MI.getNumOperands() == 4) && 404 "malformed DBG_VALUE"); 405 for (const MachineOperand &Op : MI.debug_operands()) { 406 MachineLoc ML = GetLocForOp(Op); 407 auto It = find(Locs, ML); 408 if (It == Locs.end()) { 409 Locs.push_back(ML); 410 OrigLocMap.push_back(MI.getDebugOperandIndex(&Op)); 411 } else { 412 // ML duplicates an element in Locs; replace references to Op 413 // with references to the duplicating element. 414 unsigned OpIdx = Locs.size(); 415 unsigned DuplicatingIdx = std::distance(Locs.begin(), It); 416 Expr = DIExpression::replaceArg(Expr, OpIdx, DuplicatingIdx); 417 } 418 } 419 420 // We create the debug entry values from the factory functions rather 421 // than from this ctor. 422 assert(EVKind != EntryValueLocKind::EntryValueKind && 423 !isEntryBackupLoc()); 424 } 425 426 static MachineLoc GetLocForOp(const MachineOperand &Op) { 427 MachineLocKind Kind; 428 MachineLocValue Loc; 429 if (Op.isReg()) { 430 Kind = MachineLocKind::RegisterKind; 431 Loc.RegNo = Op.getReg(); 432 } else if (Op.isImm()) { 433 Kind = MachineLocKind::ImmediateKind; 434 Loc.Immediate = Op.getImm(); 435 } else if (Op.isFPImm()) { 436 Kind = MachineLocKind::ImmediateKind; 437 Loc.FPImm = Op.getFPImm(); 438 } else if (Op.isCImm()) { 439 Kind = MachineLocKind::ImmediateKind; 440 Loc.CImm = Op.getCImm(); 441 } else 442 llvm_unreachable("Invalid Op kind for MachineLoc."); 443 return {Kind, Loc}; 444 } 445 446 /// Take the variable and machine-location in DBG_VALUE MI, and build an 447 /// entry location using the given expression. 448 static VarLoc CreateEntryLoc(const MachineInstr &MI, LexicalScopes &LS, 449 const DIExpression *EntryExpr, Register Reg) { 450 VarLoc VL(MI, LS); 451 assert(VL.Locs.size() == 1 && 452 VL.Locs[0].Kind == MachineLocKind::RegisterKind); 453 VL.EVKind = EntryValueLocKind::EntryValueKind; 454 VL.Expr = EntryExpr; 455 VL.Locs[0].Value.RegNo = Reg; 456 return VL; 457 } 458 459 /// Take the variable and machine-location from the DBG_VALUE (from the 460 /// function entry), and build an entry value backup location. The backup 461 /// location will turn into the normal location if the backup is valid at 462 /// the time of the primary location clobbering. 463 static VarLoc CreateEntryBackupLoc(const MachineInstr &MI, 464 LexicalScopes &LS, 465 const DIExpression *EntryExpr) { 466 VarLoc VL(MI, LS); 467 assert(VL.Locs.size() == 1 && 468 VL.Locs[0].Kind == MachineLocKind::RegisterKind); 469 VL.EVKind = EntryValueLocKind::EntryValueBackupKind; 470 VL.Expr = EntryExpr; 471 return VL; 472 } 473 474 /// Take the variable and machine-location from the DBG_VALUE (from the 475 /// function entry), and build a copy of an entry value backup location by 476 /// setting the register location to NewReg. 
477 static VarLoc CreateEntryCopyBackupLoc(const MachineInstr &MI, 478 LexicalScopes &LS, 479 const DIExpression *EntryExpr, 480 Register NewReg) { 481 VarLoc VL(MI, LS); 482 assert(VL.Locs.size() == 1 && 483 VL.Locs[0].Kind == MachineLocKind::RegisterKind); 484 VL.EVKind = EntryValueLocKind::EntryValueCopyBackupKind; 485 VL.Expr = EntryExpr; 486 VL.Locs[0].Value.RegNo = NewReg; 487 return VL; 488 } 489 490 /// Copy the register location in DBG_VALUE MI, updating the register to 491 /// be NewReg. 492 static VarLoc CreateCopyLoc(const VarLoc &OldVL, const MachineLoc &OldML, 493 Register NewReg) { 494 VarLoc VL = OldVL; 495 for (size_t I = 0, E = VL.Locs.size(); I < E; ++I) 496 if (VL.Locs[I] == OldML) { 497 VL.Locs[I].Kind = MachineLocKind::RegisterKind; 498 VL.Locs[I].Value.RegNo = NewReg; 499 return VL; 500 } 501 llvm_unreachable("Should have found OldML in new VarLoc."); 502 } 503 504 /// Take the variable described by DBG_VALUE* MI, and create a VarLoc 505 /// locating it in the specified spill location. 506 static VarLoc CreateSpillLoc(const VarLoc &OldVL, const MachineLoc &OldML, 507 unsigned SpillBase, StackOffset SpillOffset) { 508 VarLoc VL = OldVL; 509 for (int I = 0, E = VL.Locs.size(); I < E; ++I) 510 if (VL.Locs[I] == OldML) { 511 VL.Locs[I].Kind = MachineLocKind::SpillLocKind; 512 VL.Locs[I].Value.SpillLocation = {SpillBase, SpillOffset}; 513 return VL; 514 } 515 llvm_unreachable("Should have found OldML in new VarLoc."); 516 } 517 518 /// Create a DBG_VALUE representing this VarLoc in the given function. 519 /// Copies variable-specific information such as DILocalVariable and 520 /// inlining information from the original DBG_VALUE instruction, which may 521 /// have been several transfers ago. 522 MachineInstr *BuildDbgValue(MachineFunction &MF) const { 523 assert(!isEntryBackupLoc() && 524 "Tried to produce DBG_VALUE for backup VarLoc"); 525 const DebugLoc &DbgLoc = MI.getDebugLoc(); 526 bool Indirect = MI.isIndirectDebugValue(); 527 const auto &IID = MI.getDesc(); 528 const DILocalVariable *Var = MI.getDebugVariable(); 529 NumInserted++; 530 531 const DIExpression *DIExpr = Expr; 532 SmallVector<MachineOperand, 8> MOs; 533 for (unsigned I = 0, E = Locs.size(); I < E; ++I) { 534 MachineLocKind LocKind = Locs[I].Kind; 535 MachineLocValue Loc = Locs[I].Value; 536 const MachineOperand &Orig = MI.getDebugOperand(OrigLocMap[I]); 537 switch (LocKind) { 538 case MachineLocKind::RegisterKind: 539 // An entry value is a register location -- but with an updated 540 // expression. The register location of such DBG_VALUE is always the 541 // one from the entry DBG_VALUE, it does not matter if the entry value 542 // was copied in to another register due to some optimizations. 543 // Non-entry value register locations are like the source 544 // DBG_VALUE, but with the register number from this VarLoc. 545 MOs.push_back(MachineOperand::CreateReg( 546 EVKind == EntryValueLocKind::EntryValueKind ? Orig.getReg() 547 : Register(Loc.RegNo), 548 false)); 549 MOs.back().setIsDebug(); 550 break; 551 case MachineLocKind::SpillLocKind: { 552 // Spills are indirect DBG_VALUEs, with a base register and offset. 553 // Use the original DBG_VALUEs expression to build the spilt location 554 // on top of. FIXME: spill locations created before this pass runs 555 // are not recognized, and not handled here. 556 unsigned Base = Loc.SpillLocation.SpillBase; 557 auto *TRI = MF.getSubtarget().getRegisterInfo(); 558 if (MI.isNonListDebugValue()) { 559 auto Deref = Indirect ? 
DIExpression::DerefAfter : 0; 560 DIExpr = TRI->prependOffsetExpression( 561 DIExpr, DIExpression::ApplyOffset | Deref, 562 Loc.SpillLocation.SpillOffset); 563 Indirect = true; 564 } else { 565 SmallVector<uint64_t, 4> Ops; 566 TRI->getOffsetOpcodes(Loc.SpillLocation.SpillOffset, Ops); 567 Ops.push_back(dwarf::DW_OP_deref); 568 DIExpr = DIExpression::appendOpsToArg(DIExpr, Ops, I); 569 } 570 MOs.push_back(MachineOperand::CreateReg(Base, false)); 571 MOs.back().setIsDebug(); 572 break; 573 } 574 case MachineLocKind::ImmediateKind: { 575 MOs.push_back(Orig); 576 break; 577 } 578 case MachineLocKind::InvalidKind: 579 llvm_unreachable("Tried to produce DBG_VALUE for invalid VarLoc"); 580 } 581 } 582 return BuildMI(MF, DbgLoc, IID, Indirect, MOs, Var, DIExpr); 583 } 584 585 /// Is the Loc field a constant or constant object? 586 bool isConstant(MachineLocKind Kind) const { 587 return Kind == MachineLocKind::ImmediateKind; 588 } 589 590 /// Check if the Loc field is an entry backup location. 591 bool isEntryBackupLoc() const { 592 return EVKind == EntryValueLocKind::EntryValueBackupKind || 593 EVKind == EntryValueLocKind::EntryValueCopyBackupKind; 594 } 595 596 /// If this variable is described by register \p Reg holding the entry 597 /// value, return true. 598 bool isEntryValueBackupReg(Register Reg) const { 599 return EVKind == EntryValueLocKind::EntryValueBackupKind && usesReg(Reg); 600 } 601 602 /// If this variable is described by register \p Reg holding a copy of the 603 /// entry value, return true. 604 bool isEntryValueCopyBackupReg(Register Reg) const { 605 return EVKind == EntryValueLocKind::EntryValueCopyBackupKind && 606 usesReg(Reg); 607 } 608 609 /// If this variable is described in whole or part by \p Reg, return true. 610 bool usesReg(Register Reg) const { 611 MachineLoc RegML; 612 RegML.Kind = MachineLocKind::RegisterKind; 613 RegML.Value.RegNo = Reg; 614 return is_contained(Locs, RegML); 615 } 616 617 /// If this variable is described in whole or part by \p Reg, return true. 618 unsigned getRegIdx(Register Reg) const { 619 for (unsigned Idx = 0; Idx < Locs.size(); ++Idx) 620 if (Locs[Idx].Kind == MachineLocKind::RegisterKind && 621 Register{static_cast<unsigned>(Locs[Idx].Value.RegNo)} == Reg) 622 return Idx; 623 llvm_unreachable("Could not find given Reg in Locs"); 624 } 625 626 /// If this variable is described in whole or part by 1 or more registers, 627 /// add each of them to \p Regs and return true. 628 bool getDescribingRegs(SmallVectorImpl<uint32_t> &Regs) const { 629 bool AnyRegs = false; 630 for (const auto &Loc : Locs) 631 if (Loc.Kind == MachineLocKind::RegisterKind) { 632 Regs.push_back(Loc.Value.RegNo); 633 AnyRegs = true; 634 } 635 return AnyRegs; 636 } 637 638 bool containsSpillLocs() const { 639 return any_of(Locs, [](VarLoc::MachineLoc ML) { 640 return ML.Kind == VarLoc::MachineLocKind::SpillLocKind; 641 }); 642 } 643 644 /// If this variable is described in whole or part by \p SpillLocation, 645 /// return true. 646 bool usesSpillLoc(SpillLoc SpillLocation) const { 647 MachineLoc SpillML; 648 SpillML.Kind = MachineLocKind::SpillLocKind; 649 SpillML.Value.SpillLocation = SpillLocation; 650 return is_contained(Locs, SpillML); 651 } 652 653 /// If this variable is described in whole or part by \p SpillLocation, 654 /// return the index . 
655 unsigned getSpillLocIdx(SpillLoc SpillLocation) const { 656 for (unsigned Idx = 0; Idx < Locs.size(); ++Idx) 657 if (Locs[Idx].Kind == MachineLocKind::SpillLocKind && 658 Locs[Idx].Value.SpillLocation == SpillLocation) 659 return Idx; 660 llvm_unreachable("Could not find given SpillLoc in Locs"); 661 } 662 663 /// Determine whether the lexical scope of this value's debug location 664 /// dominates MBB. 665 bool dominates(LexicalScopes &LS, MachineBasicBlock &MBB) const { 666 return LS.dominates(MI.getDebugLoc().get(), &MBB); 667 } 668 669 #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) 670 // TRI can be null. 671 void dump(const TargetRegisterInfo *TRI, raw_ostream &Out = dbgs()) const { 672 Out << "VarLoc("; 673 for (const MachineLoc &MLoc : Locs) { 674 if (Locs.begin() != &MLoc) 675 Out << ", "; 676 switch (MLoc.Kind) { 677 case MachineLocKind::RegisterKind: 678 Out << printReg(MLoc.Value.RegNo, TRI); 679 break; 680 case MachineLocKind::SpillLocKind: 681 Out << printReg(MLoc.Value.SpillLocation.SpillBase, TRI); 682 Out << "[" << MLoc.Value.SpillLocation.SpillOffset.getFixed() << " + " 683 << MLoc.Value.SpillLocation.SpillOffset.getScalable() 684 << "x vscale" 685 << "]"; 686 break; 687 case MachineLocKind::ImmediateKind: 688 Out << MLoc.Value.Immediate; 689 break; 690 case MachineLocKind::InvalidKind: 691 llvm_unreachable("Invalid VarLoc in dump method"); 692 } 693 } 694 695 Out << ", \"" << Var.getVariable()->getName() << "\", " << *Expr << ", "; 696 if (Var.getInlinedAt()) 697 Out << "!" << Var.getInlinedAt()->getMetadataID() << ")\n"; 698 else 699 Out << "(null))"; 700 701 if (isEntryBackupLoc()) 702 Out << " (backup loc)\n"; 703 else 704 Out << "\n"; 705 } 706 #endif 707 708 bool operator==(const VarLoc &Other) const { 709 return std::tie(EVKind, Var, Expr, Locs) == 710 std::tie(Other.EVKind, Other.Var, Other.Expr, Other.Locs); 711 } 712 713 /// This operator guarantees that VarLocs are sorted by Variable first. 714 bool operator<(const VarLoc &Other) const { 715 return std::tie(Var, EVKind, Locs, Expr) < 716 std::tie(Other.Var, Other.EVKind, Other.Locs, Other.Expr); 717 } 718 }; 719 720 #ifndef NDEBUG 721 using VarVec = SmallVector<VarLoc, 32>; 722 #endif 723 724 /// VarLocMap is used for two things: 725 /// 1) Assigning LocIndices to a VarLoc. The LocIndices can be used to 726 /// virtually insert a VarLoc into a VarLocSet. 727 /// 2) Given a LocIndex, look up the unique associated VarLoc. 728 class VarLocMap { 729 /// Map a VarLoc to an index within the vector reserved for its location 730 /// within Loc2Vars. 731 std::map<VarLoc, LocIndices> Var2Indices; 732 733 /// Map a location to a vector which holds VarLocs which live in that 734 /// location. 735 SmallDenseMap<LocIndex::u32_location_t, std::vector<VarLoc>> Loc2Vars; 736 737 public: 738 /// Retrieve LocIndices for \p VL. 739 LocIndices insert(const VarLoc &VL) { 740 LocIndices &Indices = Var2Indices[VL]; 741 // If Indices is not empty, VL is already in the map. 742 if (!Indices.empty()) 743 return Indices; 744 SmallVector<LocIndex::u32_location_t, 4> Locations; 745 // LocIndices are determined by EVKind and MLs; each Register has a 746 // unique location, while all SpillLocs use a single bucket, and any EV 747 // VarLocs use only the Backup bucket or none at all (except the 748 // compulsory entry at the universal location index). LocIndices will 749 // always have an index at the universal location index as the last index. 
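      // For example (purely illustrative): a non-entry-value DBG_VALUE_LIST
      // describing a variable in terms of one register and one spill slot
      // gets one index in that register's bucket, one in the spill bucket,
      // and one in the universal bucket, with the universal index appended
      // last.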
750 if (VL.EVKind == VarLoc::EntryValueLocKind::NonEntryValueKind) { 751 VL.getDescribingRegs(Locations); 752 assert(all_of(Locations, 753 [](auto RegNo) { 754 return RegNo < LocIndex::kFirstInvalidRegLocation; 755 }) && 756 "Physreg out of range?"); 757 if (VL.containsSpillLocs()) { 758 LocIndex::u32_location_t Loc = LocIndex::kSpillLocation; 759 Locations.push_back(Loc); 760 } 761 } else if (VL.EVKind != VarLoc::EntryValueLocKind::EntryValueKind) { 762 LocIndex::u32_location_t Loc = LocIndex::kEntryValueBackupLocation; 763 Locations.push_back(Loc); 764 } 765 Locations.push_back(LocIndex::kUniversalLocation); 766 for (LocIndex::u32_location_t Location : Locations) { 767 auto &Vars = Loc2Vars[Location]; 768 Indices.push_back( 769 {Location, static_cast<LocIndex::u32_index_t>(Vars.size())}); 770 Vars.push_back(VL); 771 } 772 return Indices; 773 } 774 775 LocIndices getAllIndices(const VarLoc &VL) const { 776 auto IndIt = Var2Indices.find(VL); 777 assert(IndIt != Var2Indices.end() && "VarLoc not tracked"); 778 return IndIt->second; 779 } 780 781 /// Retrieve the unique VarLoc associated with \p ID. 782 const VarLoc &operator[](LocIndex ID) const { 783 auto LocIt = Loc2Vars.find(ID.Location); 784 assert(LocIt != Loc2Vars.end() && "Location not tracked"); 785 return LocIt->second[ID.Index]; 786 } 787 }; 788 789 using VarLocInMBB = 790 SmallDenseMap<const MachineBasicBlock *, std::unique_ptr<VarLocSet>>; 791 struct TransferDebugPair { 792 MachineInstr *TransferInst; ///< Instruction where this transfer occurs. 793 LocIndex LocationID; ///< Location number for the transfer dest. 794 }; 795 using TransferMap = SmallVector<TransferDebugPair, 4>; 796 // Types for recording Entry Var Locations emitted by a single MachineInstr, 797 // as well as recording MachineInstr which last defined a register. 798 using InstToEntryLocMap = std::multimap<const MachineInstr *, LocIndex>; 799 using RegDefToInstMap = DenseMap<Register, MachineInstr *>; 800 801 // Types for recording sets of variable fragments that overlap. For a given 802 // local variable, we record all other fragments of that variable that could 803 // overlap it, to reduce search time. 804 using FragmentOfVar = 805 std::pair<const DILocalVariable *, DIExpression::FragmentInfo>; 806 using OverlapMap = 807 DenseMap<FragmentOfVar, SmallVector<DIExpression::FragmentInfo, 1>>; 808 809 // Helper while building OverlapMap, a map of all fragments seen for a given 810 // DILocalVariable. 811 using VarToFragments = 812 DenseMap<const DILocalVariable *, SmallSet<FragmentInfo, 4>>; 813 814 /// Collects all VarLocs from \p CollectFrom. Each unique VarLoc is added 815 /// to \p Collected once, in order of insertion into \p VarLocIDs. 816 static void collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected, 817 const VarLocSet &CollectFrom, 818 const VarLocMap &VarLocIDs); 819 820 /// Get the registers which are used by VarLocs of kind RegisterKind tracked 821 /// by \p CollectFrom. 822 void getUsedRegs(const VarLocSet &CollectFrom, 823 SmallVectorImpl<Register> &UsedRegs) const; 824 825 /// This holds the working set of currently open ranges. For fast 826 /// access, this is done both as a set of VarLocIDs, and a map of 827 /// DebugVariable to recent VarLocID. Note that a DBG_VALUE ends all 828 /// previous open ranges for the same variable. In addition, we keep 829 /// two different maps (Vars/EntryValuesBackupVars), so erase/insert 830 /// methods act differently depending on whether a VarLoc is primary 831 /// location or backup one. 
In the case the VarLoc is backup location 832 /// we will erase/insert from the EntryValuesBackupVars map, otherwise 833 /// we perform the operation on the Vars. 834 class OpenRangesSet { 835 VarLocSet::Allocator &Alloc; 836 VarLocSet VarLocs; 837 // Map the DebugVariable to recent primary location ID. 838 SmallDenseMap<DebugVariable, LocIndices, 8> Vars; 839 // Map the DebugVariable to recent backup location ID. 840 SmallDenseMap<DebugVariable, LocIndices, 8> EntryValuesBackupVars; 841 OverlapMap &OverlappingFragments; 842 843 public: 844 OpenRangesSet(VarLocSet::Allocator &Alloc, OverlapMap &_OLapMap) 845 : Alloc(Alloc), VarLocs(Alloc), OverlappingFragments(_OLapMap) {} 846 847 const VarLocSet &getVarLocs() const { return VarLocs; } 848 849 // Fetches all VarLocs in \p VarLocIDs and inserts them into \p Collected. 850 // This method is needed to get every VarLoc once, as each VarLoc may have 851 // multiple indices in a VarLocMap (corresponding to each applicable 852 // location), but all VarLocs appear exactly once at the universal location 853 // index. 854 void getUniqueVarLocs(SmallVectorImpl<VarLoc> &Collected, 855 const VarLocMap &VarLocIDs) const { 856 collectAllVarLocs(Collected, VarLocs, VarLocIDs); 857 } 858 859 /// Terminate all open ranges for VL.Var by removing it from the set. 860 void erase(const VarLoc &VL); 861 862 /// Terminate all open ranges listed as indices in \c KillSet with 863 /// \c Location by removing them from the set. 864 void erase(const VarLocsInRange &KillSet, const VarLocMap &VarLocIDs, 865 LocIndex::u32_location_t Location); 866 867 /// Insert a new range into the set. 868 void insert(LocIndices VarLocIDs, const VarLoc &VL); 869 870 /// Insert a set of ranges. 871 void insertFromLocSet(const VarLocSet &ToLoad, const VarLocMap &Map); 872 873 llvm::Optional<LocIndices> getEntryValueBackup(DebugVariable Var); 874 875 /// Empty the set. 876 void clear() { 877 VarLocs.clear(); 878 Vars.clear(); 879 EntryValuesBackupVars.clear(); 880 } 881 882 /// Return whether the set is empty or not. 883 bool empty() const { 884 assert(Vars.empty() == EntryValuesBackupVars.empty() && 885 Vars.empty() == VarLocs.empty() && 886 "open ranges are inconsistent"); 887 return VarLocs.empty(); 888 } 889 890 /// Get an empty range of VarLoc IDs. 891 auto getEmptyVarLocRange() const { 892 return iterator_range<VarLocSet::const_iterator>(getVarLocs().end(), 893 getVarLocs().end()); 894 } 895 896 /// Get all set IDs for VarLocs with MLs of kind RegisterKind in \p Reg. 897 auto getRegisterVarLocs(Register Reg) const { 898 return LocIndex::indexRangeForLocation(getVarLocs(), Reg); 899 } 900 901 /// Get all set IDs for VarLocs with MLs of kind SpillLocKind. 902 auto getSpillVarLocs() const { 903 return LocIndex::indexRangeForLocation(getVarLocs(), 904 LocIndex::kSpillLocation); 905 } 906 907 /// Get all set IDs for VarLocs of EVKind EntryValueBackupKind or 908 /// EntryValueCopyBackupKind. 909 auto getEntryValueBackupVarLocs() const { 910 return LocIndex::indexRangeForLocation( 911 getVarLocs(), LocIndex::kEntryValueBackupLocation); 912 } 913 }; 914 915 /// Collect all VarLoc IDs from \p CollectFrom for VarLocs with MLs of kind 916 /// RegisterKind which are located in any reg in \p Regs. The IDs for each 917 /// VarLoc correspond to entries in the universal location bucket, which every 918 /// VarLoc has exactly 1 entry for. Insert collected IDs into \p Collected. 
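  /// For example (purely illustrative): if \p Regs contains just one register,
  /// the result is the universal-bucket index of every VarLoc in \p CollectFrom
  /// that uses that register in any of its machine locations.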
919 static void collectIDsForRegs(VarLocsInRange &Collected, 920 const DefinedRegsSet &Regs, 921 const VarLocSet &CollectFrom, 922 const VarLocMap &VarLocIDs); 923 924 VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB, VarLocInMBB &Locs) { 925 std::unique_ptr<VarLocSet> &VLS = Locs[MBB]; 926 if (!VLS) 927 VLS = std::make_unique<VarLocSet>(Alloc); 928 return *VLS.get(); 929 } 930 931 const VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB, 932 const VarLocInMBB &Locs) const { 933 auto It = Locs.find(MBB); 934 assert(It != Locs.end() && "MBB not in map"); 935 return *It->second.get(); 936 } 937 938 /// Tests whether this instruction is a spill to a stack location. 939 bool isSpillInstruction(const MachineInstr &MI, MachineFunction *MF); 940 941 /// Decide if @MI is a spill instruction and return true if it is. We use 2 942 /// criteria to make this decision: 943 /// - Is this instruction a store to a spill slot? 944 /// - Is there a register operand that is both used and killed? 945 /// TODO: Store optimization can fold spills into other stores (including 946 /// other spills). We do not handle this yet (more than one memory operand). 947 bool isLocationSpill(const MachineInstr &MI, MachineFunction *MF, 948 Register &Reg); 949 950 /// Returns true if the given machine instruction is a debug value which we 951 /// can emit entry values for. 952 /// 953 /// Currently, we generate debug entry values only for parameters that are 954 /// unmodified throughout the function and located in a register. 955 bool isEntryValueCandidate(const MachineInstr &MI, 956 const DefinedRegsSet &Regs) const; 957 958 /// If a given instruction is identified as a spill, return the spill location 959 /// and set \p Reg to the spilled register. 960 Optional<VarLoc::SpillLoc> isRestoreInstruction(const MachineInstr &MI, 961 MachineFunction *MF, 962 Register &Reg); 963 /// Given a spill instruction, extract the register and offset used to 964 /// address the spill location in a target independent way. 
965 VarLoc::SpillLoc extractSpillBaseRegAndOffset(const MachineInstr &MI); 966 void insertTransferDebugPair(MachineInstr &MI, OpenRangesSet &OpenRanges, 967 TransferMap &Transfers, VarLocMap &VarLocIDs, 968 LocIndex OldVarID, TransferKind Kind, 969 const VarLoc::MachineLoc &OldLoc, 970 Register NewReg = Register()); 971 972 void transferDebugValue(const MachineInstr &MI, OpenRangesSet &OpenRanges, 973 VarLocMap &VarLocIDs, 974 InstToEntryLocMap &EntryValTransfers, 975 RegDefToInstMap &RegSetInstrs); 976 void transferSpillOrRestoreInst(MachineInstr &MI, OpenRangesSet &OpenRanges, 977 VarLocMap &VarLocIDs, TransferMap &Transfers); 978 void cleanupEntryValueTransfers(const MachineInstr *MI, 979 OpenRangesSet &OpenRanges, 980 VarLocMap &VarLocIDs, const VarLoc &EntryVL, 981 InstToEntryLocMap &EntryValTransfers); 982 void removeEntryValue(const MachineInstr &MI, OpenRangesSet &OpenRanges, 983 VarLocMap &VarLocIDs, const VarLoc &EntryVL, 984 InstToEntryLocMap &EntryValTransfers, 985 RegDefToInstMap &RegSetInstrs); 986 void emitEntryValues(MachineInstr &MI, OpenRangesSet &OpenRanges, 987 VarLocMap &VarLocIDs, 988 InstToEntryLocMap &EntryValTransfers, 989 VarLocsInRange &KillSet); 990 void recordEntryValue(const MachineInstr &MI, 991 const DefinedRegsSet &DefinedRegs, 992 OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs); 993 void transferRegisterCopy(MachineInstr &MI, OpenRangesSet &OpenRanges, 994 VarLocMap &VarLocIDs, TransferMap &Transfers); 995 void transferRegisterDef(MachineInstr &MI, OpenRangesSet &OpenRanges, 996 VarLocMap &VarLocIDs, 997 InstToEntryLocMap &EntryValTransfers, 998 RegDefToInstMap &RegSetInstrs); 999 bool transferTerminator(MachineBasicBlock *MBB, OpenRangesSet &OpenRanges, 1000 VarLocInMBB &OutLocs, const VarLocMap &VarLocIDs); 1001 1002 void process(MachineInstr &MI, OpenRangesSet &OpenRanges, 1003 VarLocMap &VarLocIDs, TransferMap &Transfers, 1004 InstToEntryLocMap &EntryValTransfers, 1005 RegDefToInstMap &RegSetInstrs); 1006 1007 void accumulateFragmentMap(MachineInstr &MI, VarToFragments &SeenFragments, 1008 OverlapMap &OLapMap); 1009 1010 bool join(MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs, 1011 const VarLocMap &VarLocIDs, 1012 SmallPtrSet<const MachineBasicBlock *, 16> &Visited, 1013 SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks); 1014 1015 /// Create DBG_VALUE insts for inlocs that have been propagated but 1016 /// had their instruction creation deferred. 1017 void flushPendingLocs(VarLocInMBB &PendingInLocs, VarLocMap &VarLocIDs); 1018 1019 bool ExtendRanges(MachineFunction &MF, TargetPassConfig *TPC, 1020 unsigned InputBBLimit, unsigned InputDbgValLimit) override; 1021 1022 public: 1023 /// Default construct and initialize the pass. 1024 VarLocBasedLDV(); 1025 1026 ~VarLocBasedLDV(); 1027 1028 /// Print to ostream with a message. 1029 void printVarLocInMBB(const MachineFunction &MF, const VarLocInMBB &V, 1030 const VarLocMap &VarLocIDs, const char *msg, 1031 raw_ostream &Out) const; 1032 }; 1033 1034 } // end anonymous namespace 1035 1036 //===----------------------------------------------------------------------===// 1037 // Implementation 1038 //===----------------------------------------------------------------------===// 1039 1040 VarLocBasedLDV::VarLocBasedLDV() { } 1041 1042 VarLocBasedLDV::~VarLocBasedLDV() { } 1043 1044 /// Erase a variable from the set of open ranges, and additionally erase any 1045 /// fragments that may overlap it. 
If the VarLoc is a backup location, erase 1046 /// the variable from the EntryValuesBackupVars set, indicating we should stop 1047 /// tracking its backup entry location. Otherwise, if the VarLoc is primary 1048 /// location, erase the variable from the Vars set. 1049 void VarLocBasedLDV::OpenRangesSet::erase(const VarLoc &VL) { 1050 // Erasure helper. 1051 auto DoErase = [VL, this](DebugVariable VarToErase) { 1052 auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars; 1053 auto It = EraseFrom->find(VarToErase); 1054 if (It != EraseFrom->end()) { 1055 LocIndices IDs = It->second; 1056 for (LocIndex ID : IDs) 1057 VarLocs.reset(ID.getAsRawInteger()); 1058 EraseFrom->erase(It); 1059 } 1060 }; 1061 1062 DebugVariable Var = VL.Var; 1063 1064 // Erase the variable/fragment that ends here. 1065 DoErase(Var); 1066 1067 // Extract the fragment. Interpret an empty fragment as one that covers all 1068 // possible bits. 1069 FragmentInfo ThisFragment = Var.getFragmentOrDefault(); 1070 1071 // There may be fragments that overlap the designated fragment. Look them up 1072 // in the pre-computed overlap map, and erase them too. 1073 auto MapIt = OverlappingFragments.find({Var.getVariable(), ThisFragment}); 1074 if (MapIt != OverlappingFragments.end()) { 1075 for (auto Fragment : MapIt->second) { 1076 VarLocBasedLDV::OptFragmentInfo FragmentHolder; 1077 if (!DebugVariable::isDefaultFragment(Fragment)) 1078 FragmentHolder = VarLocBasedLDV::OptFragmentInfo(Fragment); 1079 DoErase({Var.getVariable(), FragmentHolder, Var.getInlinedAt()}); 1080 } 1081 } 1082 } 1083 1084 void VarLocBasedLDV::OpenRangesSet::erase(const VarLocsInRange &KillSet, 1085 const VarLocMap &VarLocIDs, 1086 LocIndex::u32_location_t Location) { 1087 VarLocSet RemoveSet(Alloc); 1088 for (LocIndex::u32_index_t ID : KillSet) { 1089 const VarLoc &VL = VarLocIDs[LocIndex(Location, ID)]; 1090 auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars; 1091 EraseFrom->erase(VL.Var); 1092 LocIndices VLI = VarLocIDs.getAllIndices(VL); 1093 for (LocIndex ID : VLI) 1094 RemoveSet.set(ID.getAsRawInteger()); 1095 } 1096 VarLocs.intersectWithComplement(RemoveSet); 1097 } 1098 1099 void VarLocBasedLDV::OpenRangesSet::insertFromLocSet(const VarLocSet &ToLoad, 1100 const VarLocMap &Map) { 1101 VarLocsInRange UniqueVarLocIDs; 1102 DefinedRegsSet Regs; 1103 Regs.insert(LocIndex::kUniversalLocation); 1104 collectIDsForRegs(UniqueVarLocIDs, Regs, ToLoad, Map); 1105 for (uint64_t ID : UniqueVarLocIDs) { 1106 LocIndex Idx = LocIndex::fromRawInteger(ID); 1107 const VarLoc &VarL = Map[Idx]; 1108 const LocIndices Indices = Map.getAllIndices(VarL); 1109 insert(Indices, VarL); 1110 } 1111 } 1112 1113 void VarLocBasedLDV::OpenRangesSet::insert(LocIndices VarLocIDs, 1114 const VarLoc &VL) { 1115 auto *InsertInto = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars; 1116 for (LocIndex ID : VarLocIDs) 1117 VarLocs.set(ID.getAsRawInteger()); 1118 InsertInto->insert({VL.Var, VarLocIDs}); 1119 } 1120 1121 /// Return the Loc ID of an entry value backup location, if it exists for the 1122 /// variable. 
1123 llvm::Optional<LocIndices> 1124 VarLocBasedLDV::OpenRangesSet::getEntryValueBackup(DebugVariable Var) { 1125 auto It = EntryValuesBackupVars.find(Var); 1126 if (It != EntryValuesBackupVars.end()) 1127 return It->second; 1128 1129 return llvm::None; 1130 } 1131 1132 void VarLocBasedLDV::collectIDsForRegs(VarLocsInRange &Collected, 1133 const DefinedRegsSet &Regs, 1134 const VarLocSet &CollectFrom, 1135 const VarLocMap &VarLocIDs) { 1136 assert(!Regs.empty() && "Nothing to collect"); 1137 SmallVector<Register, 32> SortedRegs; 1138 append_range(SortedRegs, Regs); 1139 array_pod_sort(SortedRegs.begin(), SortedRegs.end()); 1140 auto It = CollectFrom.find(LocIndex::rawIndexForReg(SortedRegs.front())); 1141 auto End = CollectFrom.end(); 1142 for (Register Reg : SortedRegs) { 1143 // The half-open interval [FirstIndexForReg, FirstInvalidIndex) contains 1144 // all possible VarLoc IDs for VarLocs with MLs of kind RegisterKind which 1145 // live in Reg. 1146 uint64_t FirstIndexForReg = LocIndex::rawIndexForReg(Reg); 1147 uint64_t FirstInvalidIndex = LocIndex::rawIndexForReg(Reg + 1); 1148 It.advanceToLowerBound(FirstIndexForReg); 1149 1150 // Iterate through that half-open interval and collect all the set IDs. 1151 for (; It != End && *It < FirstInvalidIndex; ++It) { 1152 LocIndex ItIdx = LocIndex::fromRawInteger(*It); 1153 const VarLoc &VL = VarLocIDs[ItIdx]; 1154 LocIndices LI = VarLocIDs.getAllIndices(VL); 1155 // For now, the back index is always the universal location index. 1156 assert(LI.back().Location == LocIndex::kUniversalLocation && 1157 "Unexpected order of LocIndices for VarLoc; was it inserted into " 1158 "the VarLocMap correctly?"); 1159 Collected.insert(LI.back().Index); 1160 } 1161 1162 if (It == End) 1163 return; 1164 } 1165 } 1166 1167 void VarLocBasedLDV::getUsedRegs(const VarLocSet &CollectFrom, 1168 SmallVectorImpl<Register> &UsedRegs) const { 1169 // All register-based VarLocs are assigned indices greater than or equal to 1170 // FirstRegIndex. 1171 uint64_t FirstRegIndex = 1172 LocIndex::rawIndexForReg(LocIndex::kFirstRegLocation); 1173 uint64_t FirstInvalidIndex = 1174 LocIndex::rawIndexForReg(LocIndex::kFirstInvalidRegLocation); 1175 for (auto It = CollectFrom.find(FirstRegIndex), 1176 End = CollectFrom.find(FirstInvalidIndex); 1177 It != End;) { 1178 // We found a VarLoc ID for a VarLoc that lives in a register. Figure out 1179 // which register and add it to UsedRegs. 1180 uint32_t FoundReg = LocIndex::fromRawInteger(*It).Location; 1181 assert((UsedRegs.empty() || FoundReg != UsedRegs.back()) && 1182 "Duplicate used reg"); 1183 UsedRegs.push_back(FoundReg); 1184 1185 // Skip to the next /set/ register. Note that this finds a lower bound, so 1186 // even if there aren't any VarLocs living in `FoundReg+1`, we're still 1187 // guaranteed to move on to the next register (or to end()). 
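    // For example (registers purely illustrative): if the only register
    // VarLocs in CollectFrom live in $reg0 and $reg5, then after recording
    // $reg0 the call below advances the iterator straight to the first ID in
    // the $reg5 range.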
1188 uint64_t NextRegIndex = LocIndex::rawIndexForReg(FoundReg + 1); 1189 It.advanceToLowerBound(NextRegIndex); 1190 } 1191 } 1192 1193 //===----------------------------------------------------------------------===// 1194 // Debug Range Extension Implementation 1195 //===----------------------------------------------------------------------===// 1196 1197 #ifndef NDEBUG 1198 void VarLocBasedLDV::printVarLocInMBB(const MachineFunction &MF, 1199 const VarLocInMBB &V, 1200 const VarLocMap &VarLocIDs, 1201 const char *msg, 1202 raw_ostream &Out) const { 1203 Out << '\n' << msg << '\n'; 1204 for (const MachineBasicBlock &BB : MF) { 1205 if (!V.count(&BB)) 1206 continue; 1207 const VarLocSet &L = getVarLocsInMBB(&BB, V); 1208 if (L.empty()) 1209 continue; 1210 SmallVector<VarLoc, 32> VarLocs; 1211 collectAllVarLocs(VarLocs, L, VarLocIDs); 1212 Out << "MBB: " << BB.getNumber() << ":\n"; 1213 for (const VarLoc &VL : VarLocs) { 1214 Out << " Var: " << VL.Var.getVariable()->getName(); 1215 Out << " MI: "; 1216 VL.dump(TRI, Out); 1217 } 1218 } 1219 Out << "\n"; 1220 } 1221 #endif 1222 1223 VarLocBasedLDV::VarLoc::SpillLoc 1224 VarLocBasedLDV::extractSpillBaseRegAndOffset(const MachineInstr &MI) { 1225 assert(MI.hasOneMemOperand() && 1226 "Spill instruction does not have exactly one memory operand?"); 1227 auto MMOI = MI.memoperands_begin(); 1228 const PseudoSourceValue *PVal = (*MMOI)->getPseudoValue(); 1229 assert(PVal->kind() == PseudoSourceValue::FixedStack && 1230 "Inconsistent memory operand in spill instruction"); 1231 int FI = cast<FixedStackPseudoSourceValue>(PVal)->getFrameIndex(); 1232 const MachineBasicBlock *MBB = MI.getParent(); 1233 Register Reg; 1234 StackOffset Offset = TFI->getFrameIndexReference(*MBB->getParent(), FI, Reg); 1235 return {Reg, Offset}; 1236 } 1237 1238 /// Do cleanup of \p EntryValTransfers created by \p TRInst, by removing the 1239 /// Transfer, which uses the to-be-deleted \p EntryVL. 1240 void VarLocBasedLDV::cleanupEntryValueTransfers( 1241 const MachineInstr *TRInst, OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs, 1242 const VarLoc &EntryVL, InstToEntryLocMap &EntryValTransfers) { 1243 if (EntryValTransfers.empty() || TRInst == nullptr) 1244 return; 1245 1246 auto TransRange = EntryValTransfers.equal_range(TRInst); 1247 for (auto TDPair : llvm::make_range(TransRange.first, TransRange.second)) { 1248 const VarLoc &EmittedEV = VarLocIDs[TDPair.second]; 1249 if (std::tie(EntryVL.Var, EntryVL.Locs[0].Value.RegNo, EntryVL.Expr) == 1250 std::tie(EmittedEV.Var, EmittedEV.Locs[0].Value.RegNo, 1251 EmittedEV.Expr)) { 1252 OpenRanges.erase(EmittedEV); 1253 EntryValTransfers.erase(TRInst); 1254 break; 1255 } 1256 } 1257 } 1258 1259 /// Try to salvage the debug entry value if we encounter a new debug value 1260 /// describing the same parameter, otherwise stop tracking the value. Return 1261 /// true if we should stop tracking the entry value and do the cleanup of 1262 /// emitted Entry Value Transfers, otherwise return false. 1263 void VarLocBasedLDV::removeEntryValue(const MachineInstr &MI, 1264 OpenRangesSet &OpenRanges, 1265 VarLocMap &VarLocIDs, 1266 const VarLoc &EntryVL, 1267 InstToEntryLocMap &EntryValTransfers, 1268 RegDefToInstMap &RegSetInstrs) { 1269 // Skip the DBG_VALUE which is the debug entry value itself. 1270 if (&MI == &EntryVL.MI) 1271 return; 1272 1273 // If the parameter's location is not register location, we can not track 1274 // the entry value any more. 
It doesn't have the TransferInst which defines 1275 // register, so no Entry Value Transfers have been emitted already. 1276 if (!MI.getDebugOperand(0).isReg()) 1277 return; 1278 1279 // Try to get non-debug instruction responsible for the DBG_VALUE. 1280 const MachineInstr *TransferInst = nullptr; 1281 Register Reg = MI.getDebugOperand(0).getReg(); 1282 if (Reg.isValid() && RegSetInstrs.find(Reg) != RegSetInstrs.end()) 1283 TransferInst = RegSetInstrs.find(Reg)->second; 1284 1285 // Case of the parameter's DBG_VALUE at the start of entry MBB. 1286 if (!TransferInst && !LastNonDbgMI && MI.getParent()->isEntryBlock()) 1287 return; 1288 1289 // If the debug expression from the DBG_VALUE is not empty, we can assume the 1290 // parameter's value has changed indicating that we should stop tracking its 1291 // entry value as well. 1292 if (MI.getDebugExpression()->getNumElements() == 0 && TransferInst) { 1293 // If the DBG_VALUE comes from a copy instruction that copies the entry 1294 // value, it means the parameter's value has not changed and we should be 1295 // able to use its entry value. 1296 // TODO: Try to keep tracking of an entry value if we encounter a propagated 1297 // DBG_VALUE describing the copy of the entry value. (Propagated entry value 1298 // does not indicate the parameter modification.) 1299 auto DestSrc = TII->isCopyInstr(*TransferInst); 1300 if (DestSrc) { 1301 const MachineOperand *SrcRegOp, *DestRegOp; 1302 SrcRegOp = DestSrc->Source; 1303 DestRegOp = DestSrc->Destination; 1304 if (Reg == DestRegOp->getReg()) { 1305 for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) { 1306 const VarLoc &VL = VarLocIDs[LocIndex::fromRawInteger(ID)]; 1307 if (VL.isEntryValueCopyBackupReg(Reg) && 1308 // Entry Values should not be variadic. 1309 VL.MI.getDebugOperand(0).getReg() == SrcRegOp->getReg()) 1310 return; 1311 } 1312 } 1313 } 1314 } 1315 1316 LLVM_DEBUG(dbgs() << "Deleting a DBG entry value because of: "; 1317 MI.print(dbgs(), /*IsStandalone*/ false, 1318 /*SkipOpers*/ false, /*SkipDebugLoc*/ false, 1319 /*AddNewLine*/ true, TII)); 1320 cleanupEntryValueTransfers(TransferInst, OpenRanges, VarLocIDs, EntryVL, 1321 EntryValTransfers); 1322 OpenRanges.erase(EntryVL); 1323 } 1324 1325 /// End all previous ranges related to @MI and start a new range from @MI 1326 /// if it is a DBG_VALUE instr. 1327 void VarLocBasedLDV::transferDebugValue(const MachineInstr &MI, 1328 OpenRangesSet &OpenRanges, 1329 VarLocMap &VarLocIDs, 1330 InstToEntryLocMap &EntryValTransfers, 1331 RegDefToInstMap &RegSetInstrs) { 1332 if (!MI.isDebugValue()) 1333 return; 1334 const DILocalVariable *Var = MI.getDebugVariable(); 1335 const DIExpression *Expr = MI.getDebugExpression(); 1336 const DILocation *DebugLoc = MI.getDebugLoc(); 1337 const DILocation *InlinedAt = DebugLoc->getInlinedAt(); 1338 assert(Var->isValidLocationForIntrinsic(DebugLoc) && 1339 "Expected inlined-at fields to agree"); 1340 1341 DebugVariable V(Var, Expr, InlinedAt); 1342 1343 // Check if this DBG_VALUE indicates a parameter's value changing. 1344 // If that is the case, we should stop tracking its entry value. 
1345 auto EntryValBackupID = OpenRanges.getEntryValueBackup(V); 1346 if (Var->isParameter() && EntryValBackupID) { 1347 const VarLoc &EntryVL = VarLocIDs[EntryValBackupID->back()]; 1348 removeEntryValue(MI, OpenRanges, VarLocIDs, EntryVL, EntryValTransfers, 1349 RegSetInstrs); 1350 } 1351 1352 if (all_of(MI.debug_operands(), [](const MachineOperand &MO) { 1353 return (MO.isReg() && MO.getReg()) || MO.isImm() || MO.isFPImm() || 1354 MO.isCImm(); 1355 })) { 1356 // Use normal VarLoc constructor for registers and immediates. 1357 VarLoc VL(MI, LS); 1358 // End all previous ranges of VL.Var. 1359 OpenRanges.erase(VL); 1360 1361 LocIndices IDs = VarLocIDs.insert(VL); 1362 // Add the VarLoc to OpenRanges from this DBG_VALUE. 1363 OpenRanges.insert(IDs, VL); 1364 } else if (MI.memoperands().size() > 0) { 1365 llvm_unreachable("DBG_VALUE with mem operand encountered after regalloc?"); 1366 } else { 1367 // This must be an undefined location. If it has an open range, erase it. 1368 assert(MI.isUndefDebugValue() && 1369 "Unexpected non-undef DBG_VALUE encountered"); 1370 VarLoc VL(MI, LS); 1371 OpenRanges.erase(VL); 1372 } 1373 } 1374 1375 // This should be removed later, doesn't fit the new design. 1376 void VarLocBasedLDV::collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected, 1377 const VarLocSet &CollectFrom, 1378 const VarLocMap &VarLocIDs) { 1379 // The half-open interval [FirstIndexForReg, FirstInvalidIndex) contains all 1380 // possible VarLoc IDs for VarLocs with MLs of kind RegisterKind which live 1381 // in Reg. 1382 uint64_t FirstIndex = LocIndex::rawIndexForReg(LocIndex::kUniversalLocation); 1383 uint64_t FirstInvalidIndex = 1384 LocIndex::rawIndexForReg(LocIndex::kUniversalLocation + 1); 1385 // Iterate through that half-open interval and collect all the set IDs. 1386 for (auto It = CollectFrom.find(FirstIndex), End = CollectFrom.end(); 1387 It != End && *It < FirstInvalidIndex; ++It) { 1388 LocIndex RegIdx = LocIndex::fromRawInteger(*It); 1389 Collected.push_back(VarLocIDs[RegIdx]); 1390 } 1391 } 1392 1393 /// Turn the entry value backup locations into primary locations. 1394 void VarLocBasedLDV::emitEntryValues(MachineInstr &MI, 1395 OpenRangesSet &OpenRanges, 1396 VarLocMap &VarLocIDs, 1397 InstToEntryLocMap &EntryValTransfers, 1398 VarLocsInRange &KillSet) { 1399 // Do not insert entry value locations after a terminator. 1400 if (MI.isTerminator()) 1401 return; 1402 1403 for (uint32_t ID : KillSet) { 1404 // The KillSet IDs are indices for the universal location bucket. 1405 LocIndex Idx = LocIndex(LocIndex::kUniversalLocation, ID); 1406 const VarLoc &VL = VarLocIDs[Idx]; 1407 if (!VL.Var.getVariable()->isParameter()) 1408 continue; 1409 1410 auto DebugVar = VL.Var; 1411 Optional<LocIndices> EntryValBackupIDs = 1412 OpenRanges.getEntryValueBackup(DebugVar); 1413 1414 // If the parameter has the entry value backup, it means we should 1415 // be able to use its entry value. 1416 if (!EntryValBackupIDs) 1417 continue; 1418 1419 const VarLoc &EntryVL = VarLocIDs[EntryValBackupIDs->back()]; 1420 VarLoc EntryLoc = VarLoc::CreateEntryLoc(EntryVL.MI, LS, EntryVL.Expr, 1421 EntryVL.Locs[0].Value.RegNo); 1422 LocIndices EntryValueIDs = VarLocIDs.insert(EntryLoc); 1423 assert(EntryValueIDs.size() == 1 && 1424 "EntryValue loc should not be variadic"); 1425 EntryValTransfers.insert({&MI, EntryValueIDs.back()}); 1426 OpenRanges.insert(EntryValueIDs, EntryLoc); 1427 } 1428 } 1429 1430 /// Create new TransferDebugPair and insert it in \p Transfers. 

/// Create new TransferDebugPair and insert it in \p Transfers. The VarLoc
/// with \p OldVarID should be deleted from \p OpenRanges and replaced with a
/// new VarLoc. If \p NewReg is different from the default zero value, then the
/// new location will be a register location created by the copy-like
/// instruction; otherwise it is the variable's location on the stack.
void VarLocBasedLDV::insertTransferDebugPair(
    MachineInstr &MI, OpenRangesSet &OpenRanges, TransferMap &Transfers,
    VarLocMap &VarLocIDs, LocIndex OldVarID, TransferKind Kind,
    const VarLoc::MachineLoc &OldLoc, Register NewReg) {
  const VarLoc &OldVarLoc = VarLocIDs[OldVarID];

  auto ProcessVarLoc = [&MI, &OpenRanges, &Transfers, &VarLocIDs](VarLoc &VL) {
    LocIndices LocIds = VarLocIDs.insert(VL);

    // Close this variable's previous location range.
    OpenRanges.erase(VL);

    // Record the new location as an open range, and a postponed transfer
    // inserting a DBG_VALUE for this location.
    OpenRanges.insert(LocIds, VL);
    assert(!MI.isTerminator() && "Cannot insert DBG_VALUE after terminator");
    TransferDebugPair MIP = {&MI, LocIds.back()};
    Transfers.push_back(MIP);
  };

  // End all previous ranges of VL.Var.
  OpenRanges.erase(VarLocIDs[OldVarID]);
  switch (Kind) {
  case TransferKind::TransferCopy: {
    assert(NewReg &&
           "No register supplied when handling a copy of a debug value");
    // Create a DBG_VALUE instruction to describe the Var in its new
    // register location.
    VarLoc VL = VarLoc::CreateCopyLoc(OldVarLoc, OldLoc, NewReg);
    ProcessVarLoc(VL);
    LLVM_DEBUG({
      dbgs() << "Creating VarLoc for register copy:";
      VL.dump(TRI);
    });
    return;
  }
  case TransferKind::TransferSpill: {
    // Create a DBG_VALUE instruction to describe the Var in its spilled
    // location.
    VarLoc::SpillLoc SpillLocation = extractSpillBaseRegAndOffset(MI);
    VarLoc VL = VarLoc::CreateSpillLoc(
        OldVarLoc, OldLoc, SpillLocation.SpillBase, SpillLocation.SpillOffset);
    ProcessVarLoc(VL);
    LLVM_DEBUG({
      dbgs() << "Creating VarLoc for spill:";
      VL.dump(TRI);
    });
    return;
  }
  case TransferKind::TransferRestore: {
    assert(NewReg &&
           "No register supplied when handling a restore of a debug value");
    // DebugInstr refers to the pre-spill location, therefore we can reuse
    // its expression.
    VarLoc VL = VarLoc::CreateCopyLoc(OldVarLoc, OldLoc, NewReg);
    ProcessVarLoc(VL);
    LLVM_DEBUG({
      dbgs() << "Creating VarLoc for restore:";
      VL.dump(TRI);
    });
    return;
  }
  }
  llvm_unreachable("Invalid transfer kind");
}

/// A definition of a register may mark the end of a range.
void VarLocBasedLDV::transferRegisterDef(MachineInstr &MI,
                                         OpenRangesSet &OpenRanges,
                                         VarLocMap &VarLocIDs,
                                         InstToEntryLocMap &EntryValTransfers,
                                         RegDefToInstMap &RegSetInstrs) {

  // Meta Instructions do not affect the debug liveness of any register they
  // define.
  if (MI.isMetaInstruction())
    return;

  MachineFunction *MF = MI.getMF();
  const TargetLowering *TLI = MF->getSubtarget().getTargetLowering();
  Register SP = TLI->getStackPointerRegisterToSaveRestore();

  // Find the regs killed by MI, and find regmasks of preserved regs.
  DefinedRegsSet DeadRegs;
  SmallVector<const uint32_t *, 4> RegMasks;
  for (const MachineOperand &MO : MI.operands()) {
    // Determine whether the operand is a register def.
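    // A call's def of SP is deliberately not treated as a clobber here,
    // mirroring the regmask handling below: stack locations are assumed to
    // survive the call.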
    if (MO.isReg() && MO.isDef() && MO.getReg() &&
        Register::isPhysicalRegister(MO.getReg()) &&
        !(MI.isCall() && MO.getReg() == SP)) {
      // Remove ranges of all aliased registers.
      for (MCRegAliasIterator RAI(MO.getReg(), TRI, true); RAI.isValid(); ++RAI)
        // FIXME: Can we break out of this loop early if no insertion occurs?
        DeadRegs.insert(*RAI);
      if (RegSetInstrs.find(MO.getReg()) != RegSetInstrs.end())
        RegSetInstrs.erase(MO.getReg());
      RegSetInstrs.insert({MO.getReg(), &MI});
    } else if (MO.isRegMask()) {
      RegMasks.push_back(MO.getRegMask());
    }
  }

  // Erase VarLocs which reside in one of the dead registers. For performance
  // reasons, it's critical to not iterate over the full set of open VarLocs.
  // Iterate over the set of dying/used regs instead.
  if (!RegMasks.empty()) {
    SmallVector<Register, 32> UsedRegs;
    getUsedRegs(OpenRanges.getVarLocs(), UsedRegs);
    for (Register Reg : UsedRegs) {
      // Remove ranges of all clobbered registers. Register masks don't usually
      // list SP as preserved. Assume that call instructions never clobber SP,
      // because some backends (e.g., AArch64) never list SP in the regmask.
      // While the debug info may be off for an instruction or two around
      // callee-cleanup calls, transferring the DEBUG_VALUE across the call is
      // still a better user experience.
      if (Reg == SP)
        continue;
      bool AnyRegMaskKillsReg =
          any_of(RegMasks, [Reg](const uint32_t *RegMask) {
            return MachineOperand::clobbersPhysReg(RegMask, Reg);
          });
      if (AnyRegMaskKillsReg)
        DeadRegs.insert(Reg);
      if (AnyRegMaskKillsReg) {
        if (RegSetInstrs.find(Reg) != RegSetInstrs.end())
          RegSetInstrs.erase(Reg);
        RegSetInstrs.insert({Reg, &MI});
      }
    }
  }

  if (DeadRegs.empty())
    return;

  VarLocsInRange KillSet;
  collectIDsForRegs(KillSet, DeadRegs, OpenRanges.getVarLocs(), VarLocIDs);
  OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kUniversalLocation);

  if (TPC) {
    auto &TM = TPC->getTM<TargetMachine>();
    if (TM.Options.ShouldEmitDebugEntryValues())
      emitEntryValues(MI, OpenRanges, VarLocIDs, EntryValTransfers, KillSet);
  }
}

bool VarLocBasedLDV::isSpillInstruction(const MachineInstr &MI,
                                        MachineFunction *MF) {
  // TODO: Handle multiple stores folded into one.
  if (!MI.hasOneMemOperand())
    return false;

  if (!MI.getSpillSize(TII) && !MI.getFoldedSpillSize(TII))
    return false; // This is not a spill instruction, since no valid size was
                  // returned from either function.

  return true;
}

bool VarLocBasedLDV::isLocationSpill(const MachineInstr &MI,
                                     MachineFunction *MF, Register &Reg) {
  if (!isSpillInstruction(MI, MF))
    return false;

  auto isKilledReg = [&](const MachineOperand MO, Register &Reg) {
    if (!MO.isReg() || !MO.isUse()) {
      Reg = 0;
      return false;
    }
    Reg = MO.getReg();
    return MO.isKill();
  };

  for (const MachineOperand &MO : MI.operands()) {
    // In a spill instruction generated by the InlineSpiller the spilled
    // register has its kill flag set.
    if (isKilledReg(MO, Reg))
      return true;
    if (Reg != 0) {
      // Check whether next instruction kills the spilled register.
      // FIXME: Current solution does not cover search for killed register in
      // bundles and instructions further down the chain.
      auto NextI = std::next(MI.getIterator());
      // Skip next instruction that points to basic block end iterator.
      if (MI.getParent()->end() == NextI)
        continue;
      Register RegNext;
      for (const MachineOperand &MONext : NextI->operands()) {
        // Return true if we came across the register from the
        // previous spill instruction that is killed in NextI.
        if (isKilledReg(MONext, RegNext) && RegNext == Reg)
          return true;
      }
    }
  }
  // Return false if we didn't find a spilled register.
  return false;
}

Optional<VarLocBasedLDV::VarLoc::SpillLoc>
VarLocBasedLDV::isRestoreInstruction(const MachineInstr &MI,
                                     MachineFunction *MF, Register &Reg) {
  if (!MI.hasOneMemOperand())
    return None;

  // FIXME: Handle folded restore instructions with more than one memory
  // operand.
  if (MI.getRestoreSize(TII)) {
    Reg = MI.getOperand(0).getReg();
    return extractSpillBaseRegAndOffset(MI);
  }
  return None;
}

/// A spilled register may indicate that we have to end the current range of
/// a variable and create a new one for the spill location.
/// A restored register may indicate the reverse situation.
/// We don't want to insert any instructions in process(), so we just create
/// the DBG_VALUE without inserting it and keep track of it in \p Transfers.
/// It will be inserted into the BB when we're done iterating over the
/// instructions.
void VarLocBasedLDV::transferSpillOrRestoreInst(MachineInstr &MI,
                                                OpenRangesSet &OpenRanges,
                                                VarLocMap &VarLocIDs,
                                                TransferMap &Transfers) {
  MachineFunction *MF = MI.getMF();
  TransferKind TKind;
  Register Reg;
  Optional<VarLoc::SpillLoc> Loc;

  LLVM_DEBUG(dbgs() << "Examining instruction: "; MI.dump(););

  // First, if there are any DBG_VALUEs pointing at a spill slot that is
  // written to, then close the variable location. The value in memory
  // will have changed.
  VarLocsInRange KillSet;
  if (isSpillInstruction(MI, MF)) {
    Loc = extractSpillBaseRegAndOffset(MI);
    for (uint64_t ID : OpenRanges.getSpillVarLocs()) {
      LocIndex Idx = LocIndex::fromRawInteger(ID);
      const VarLoc &VL = VarLocIDs[Idx];
      assert(VL.containsSpillLocs() && "Broken VarLocSet?");
      if (VL.usesSpillLoc(*Loc)) {
        // This location is overwritten by the current instruction -- terminate
        // the open range, and insert an explicit DBG_VALUE $noreg.
        //
        // Doing this at a later stage would require re-interpreting all
        // DBG_VALUEs and DIExpressions to identify whether they point at
        // memory, and then analysing all memory writes to see if they
        // overwrite that memory, which is expensive.
        //
        // At this stage, we already know which DBG_VALUEs are for spills and
        // where they are located; it's best to handle overwrites now.
        KillSet.insert(ID);
        unsigned SpillLocIdx = VL.getSpillLocIdx(*Loc);
        VarLoc::MachineLoc OldLoc = VL.Locs[SpillLocIdx];
        VarLoc UndefVL = VarLoc::CreateCopyLoc(VL, OldLoc, 0);
        LocIndices UndefLocIDs = VarLocIDs.insert(UndefVL);
        Transfers.push_back({&MI, UndefLocIDs.back()});
      }
    }
    OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kSpillLocation);
  }

  // Try to recognise spill and restore instructions that may create a new
  // variable location.
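  // (Note) A spill moves a still-valid value from a register into a stack
  // slot and a restore moves it back, so in both cases the variable keeps a
  // location; we record a transfer rather than just closing the range.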
  if (isLocationSpill(MI, MF, Reg)) {
    TKind = TransferKind::TransferSpill;
    LLVM_DEBUG(dbgs() << "Recognized as spill: "; MI.dump(););
    LLVM_DEBUG(dbgs() << "Register: " << Reg << " " << printReg(Reg, TRI)
                      << "\n");
  } else {
    if (!(Loc = isRestoreInstruction(MI, MF, Reg)))
      return;
    TKind = TransferKind::TransferRestore;
    LLVM_DEBUG(dbgs() << "Recognized as restore: "; MI.dump(););
    LLVM_DEBUG(dbgs() << "Register: " << Reg << " " << printReg(Reg, TRI)
                      << "\n");
  }
  // Check if the register or spill location is the location of a debug value.
  auto TransferCandidates = OpenRanges.getEmptyVarLocRange();
  if (TKind == TransferKind::TransferSpill)
    TransferCandidates = OpenRanges.getRegisterVarLocs(Reg);
  else if (TKind == TransferKind::TransferRestore)
    TransferCandidates = OpenRanges.getSpillVarLocs();
  for (uint64_t ID : TransferCandidates) {
    LocIndex Idx = LocIndex::fromRawInteger(ID);
    const VarLoc &VL = VarLocIDs[Idx];
    unsigned LocIdx;
    if (TKind == TransferKind::TransferSpill) {
      assert(VL.usesReg(Reg) && "Broken VarLocSet?");
      LLVM_DEBUG(dbgs() << "Spilling Register " << printReg(Reg, TRI) << '('
                        << VL.Var.getVariable()->getName() << ")\n");
      LocIdx = VL.getRegIdx(Reg);
    } else {
      assert(TKind == TransferKind::TransferRestore && VL.containsSpillLocs() &&
             "Broken VarLocSet?");
      if (!VL.usesSpillLoc(*Loc))
        // The spill location is not the location of a debug value.
        continue;
      LLVM_DEBUG(dbgs() << "Restoring Register " << printReg(Reg, TRI) << '('
                        << VL.Var.getVariable()->getName() << ")\n");
      LocIdx = VL.getSpillLocIdx(*Loc);
    }
    VarLoc::MachineLoc MLoc = VL.Locs[LocIdx];
    insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, Idx, TKind,
                            MLoc, Reg);
    // FIXME: A comment should explain why it's correct to return early here,
    // if that is in fact correct.
    return;
  }
}

/// If \p MI is a register copy instruction that copies a previously tracked
/// value from one register to another register that is callee saved, we
/// create a new DBG_VALUE instruction described with the copy's destination
/// register.
void VarLocBasedLDV::transferRegisterCopy(MachineInstr &MI,
                                          OpenRangesSet &OpenRanges,
                                          VarLocMap &VarLocIDs,
                                          TransferMap &Transfers) {
  auto DestSrc = TII->isCopyInstr(MI);
  if (!DestSrc)
    return;

  const MachineOperand *DestRegOp = DestSrc->Destination;
  const MachineOperand *SrcRegOp = DestSrc->Source;

  if (!DestRegOp->isDef())
    return;

  auto isCalleeSavedReg = [&](Register Reg) {
    for (MCRegAliasIterator RAI(Reg, TRI, true); RAI.isValid(); ++RAI)
      if (CalleeSavedRegs.test(*RAI))
        return true;
    return false;
  };

  Register SrcReg = SrcRegOp->getReg();
  Register DestReg = DestRegOp->getReg();

  // We want to recognize instructions where the destination register is a
  // callee-saved register. If a register that could be clobbered by a call
  // were included, there would be a great chance of it being clobbered soon.
  // It is more likely that the previous register location, which is callee
  // saved, is going to stay unclobbered for longer, even if it is killed.
  if (!isCalleeSavedReg(DestReg))
    return;

  // Remember an entry value movement.
  // If we encounter a new debug value of a parameter describing only a move
  // of the value around, rather than modifying it, we are still able to use
  // the entry value if needed.
  if (isRegOtherThanSPAndFP(*DestRegOp, MI, TRI)) {
    for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) {
      LocIndex Idx = LocIndex::fromRawInteger(ID);
      const VarLoc &VL = VarLocIDs[Idx];
      if (VL.isEntryValueBackupReg(SrcReg)) {
        LLVM_DEBUG(dbgs() << "Copy of the entry value: "; MI.dump(););
        VarLoc EntryValLocCopyBackup =
            VarLoc::CreateEntryCopyBackupLoc(VL.MI, LS, VL.Expr, DestReg);
        // Stop tracking the original entry value.
        OpenRanges.erase(VL);

        // Start tracking the entry value copy.
        LocIndices EntryValCopyLocIDs = VarLocIDs.insert(EntryValLocCopyBackup);
        OpenRanges.insert(EntryValCopyLocIDs, EntryValLocCopyBackup);
        break;
      }
    }
  }

  if (!SrcRegOp->isKill())
    return;

  for (uint64_t ID : OpenRanges.getRegisterVarLocs(SrcReg)) {
    LocIndex Idx = LocIndex::fromRawInteger(ID);
    assert(VarLocIDs[Idx].usesReg(SrcReg) && "Broken VarLocSet?");
    VarLoc::MachineLocValue Loc;
    Loc.RegNo = SrcReg;
    VarLoc::MachineLoc MLoc{VarLoc::MachineLocKind::RegisterKind, Loc};
    insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, Idx,
                            TransferKind::TransferCopy, MLoc, DestReg);
    // FIXME: A comment should explain why it's correct to return early here,
    // if that is in fact correct.
    return;
  }
}

/// Terminate all open ranges at the end of the current basic block.
bool VarLocBasedLDV::transferTerminator(MachineBasicBlock *CurMBB,
                                        OpenRangesSet &OpenRanges,
                                        VarLocInMBB &OutLocs,
                                        const VarLocMap &VarLocIDs) {
  bool Changed = false;
  LLVM_DEBUG({
    VarVec VarLocs;
    OpenRanges.getUniqueVarLocs(VarLocs, VarLocIDs);
    for (VarLoc &VL : VarLocs) {
      // Copy OpenRanges to OutLocs, if not already present.
      dbgs() << "Add to OutLocs in MBB #" << CurMBB->getNumber() << ": ";
      VL.dump(TRI);
    }
  });
  VarLocSet &VLS = getVarLocsInMBB(CurMBB, OutLocs);
  Changed = VLS != OpenRanges.getVarLocs();
  // The new OutLocs set may be different due to spill, restore or register
  // copy instruction processing.
  if (Changed)
    VLS = OpenRanges.getVarLocs();
  OpenRanges.clear();
  return Changed;
}

/// Accumulate a mapping between each DILocalVariable fragment and other
/// fragments of that DILocalVariable which overlap. This reduces work during
/// the data-flow stage from "Find any overlapping fragments" to "Check if the
/// known-to-overlap fragments are present".
/// \param MI A previously unprocessed DEBUG_VALUE instruction to analyze for
/// fragment usage.
/// \param SeenFragments Map from DILocalVariable to all fragments of that
/// Variable which are known to exist.
/// \param OverlappingFragments The overlap map being constructed, from one
/// Var/Fragment pair to a vector of fragments known to overlap.
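///
/// For example, for fragments of one variable, (offset 0, size 32) overlaps
/// (offset 16, size 16) but not (offset 32, size 32); only overlapping pairs
/// end up recorded in \p OverlappingFragments.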
void VarLocBasedLDV::accumulateFragmentMap(MachineInstr &MI,
                                           VarToFragments &SeenFragments,
                                           OverlapMap &OverlappingFragments) {
  DebugVariable MIVar(MI.getDebugVariable(), MI.getDebugExpression(),
                      MI.getDebugLoc()->getInlinedAt());
  FragmentInfo ThisFragment = MIVar.getFragmentOrDefault();

  // If this is the first sighting of this variable, then we are guaranteed
  // there are currently no overlapping fragments either. Initialize the set
  // of seen fragments, record no overlaps for the current one, and return.
  auto SeenIt = SeenFragments.find(MIVar.getVariable());
  if (SeenIt == SeenFragments.end()) {
    SmallSet<FragmentInfo, 4> OneFragment;
    OneFragment.insert(ThisFragment);
    SeenFragments.insert({MIVar.getVariable(), OneFragment});

    OverlappingFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
    return;
  }

  // If this particular Variable/Fragment pair already exists in the overlap
  // map, it has already been accounted for.
  auto IsInOLapMap =
      OverlappingFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
  if (!IsInOLapMap.second)
    return;

  auto &ThisFragmentsOverlaps = IsInOLapMap.first->second;
  auto &AllSeenFragments = SeenIt->second;

  // Otherwise, examine all other seen fragments for this variable, with "this"
  // fragment being a previously unseen fragment. Record any pair of
  // overlapping fragments.
  for (auto &ASeenFragment : AllSeenFragments) {
    // Does this previously seen fragment overlap?
    if (DIExpression::fragmentsOverlap(ThisFragment, ASeenFragment)) {
      // Yes: Mark the current fragment as being overlapped.
      ThisFragmentsOverlaps.push_back(ASeenFragment);
      // Mark the previously seen fragment as being overlapped by the current
      // one.
      auto ASeenFragmentsOverlaps =
          OverlappingFragments.find({MIVar.getVariable(), ASeenFragment});
      assert(ASeenFragmentsOverlaps != OverlappingFragments.end() &&
             "Previously seen var fragment has no vector of overlaps");
      ASeenFragmentsOverlaps->second.push_back(ThisFragment);
    }
  }

  AllSeenFragments.insert(ThisFragment);
}

/// This routine applies the per-instruction transfer function to \p MI,
/// updating OpenRanges and the auxiliary transfer maps.
void VarLocBasedLDV::process(MachineInstr &MI, OpenRangesSet &OpenRanges,
                             VarLocMap &VarLocIDs, TransferMap &Transfers,
                             InstToEntryLocMap &EntryValTransfers,
                             RegDefToInstMap &RegSetInstrs) {
  if (!MI.isDebugInstr())
    LastNonDbgMI = &MI;
  transferDebugValue(MI, OpenRanges, VarLocIDs, EntryValTransfers,
                     RegSetInstrs);
  transferRegisterDef(MI, OpenRanges, VarLocIDs, EntryValTransfers,
                      RegSetInstrs);
  transferRegisterCopy(MI, OpenRanges, VarLocIDs, Transfers);
  transferSpillOrRestoreInst(MI, OpenRanges, VarLocIDs, Transfers);
}

/// This routine joins the analysis results of all incoming edges of @MBB by
/// inserting a new DBG_VALUE instruction at the start of @MBB, provided the
/// same source variable resides in the same location in all the predecessors
/// of @MBB.
bool VarLocBasedLDV::join(
    MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs,
    const VarLocMap &VarLocIDs,
    SmallPtrSet<const MachineBasicBlock *, 16> &Visited,
    SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks) {
  LLVM_DEBUG(dbgs() << "join MBB: " << MBB.getNumber() << "\n");

  VarLocSet InLocsT(Alloc); // Temporary incoming locations.
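
  // The meet over predecessors below is plain set intersection on VarLoc IDs:
  // a location is live-in only if every visited predecessor's OutLocs contains
  // the identical VarLoc (the same variable in a different register gets a
  // different ID, so disagreeing predecessors drop the location).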
  // For all predecessors of this MBB, find the set of VarLocs that
  // can be joined.
  int NumVisited = 0;
  for (auto p : MBB.predecessors()) {
    // Ignore backedges if we have not visited the predecessor yet. As the
    // predecessor hasn't yet had locations propagated into it, most locations
    // will not yet be valid, so treat them as all being uninitialized and
    // potentially valid. If a location guessed to be correct here is
    // invalidated later, we will remove it when we revisit this block.
    if (!Visited.count(p)) {
      LLVM_DEBUG(dbgs() << "  ignoring unvisited pred MBB: " << p->getNumber()
                        << "\n");
      continue;
    }
    auto OL = OutLocs.find(p);
    // The join is empty if any of the visited predecessors has empty OutLocs.
    if (OL == OutLocs.end())
      return false;

    // Just copy over the Out locs to incoming locs for the first visited
    // predecessor, and for all other predecessors join the Out locs.
    VarLocSet &OutLocVLS = *OL->second.get();
    if (!NumVisited)
      InLocsT = OutLocVLS;
    else
      InLocsT &= OutLocVLS;

    LLVM_DEBUG({
      if (!InLocsT.empty()) {
        VarVec VarLocs;
        collectAllVarLocs(VarLocs, InLocsT, VarLocIDs);
        for (const VarLoc &VL : VarLocs)
          dbgs() << "  gathered candidate incoming var: "
                 << VL.Var.getVariable()->getName() << "\n";
      }
    });

    NumVisited++;
  }

  // Filter out DBG_VALUEs that are out of scope.
  VarLocSet KillSet(Alloc);
  bool IsArtificial = ArtificialBlocks.count(&MBB);
  if (!IsArtificial) {
    for (uint64_t ID : InLocsT) {
      LocIndex Idx = LocIndex::fromRawInteger(ID);
      if (!VarLocIDs[Idx].dominates(LS, MBB)) {
        KillSet.set(ID);
        LLVM_DEBUG({
          auto Name = VarLocIDs[Idx].Var.getVariable()->getName();
          dbgs() << "  killing " << Name << ", it doesn't dominate MBB\n";
        });
      }
    }
  }
  InLocsT.intersectWithComplement(KillSet);

  // As we are processing blocks in reverse post-order we
  // should have processed at least one predecessor, unless it
  // is the entry block which has no predecessor.
  assert((NumVisited || MBB.pred_empty()) &&
         "Should have processed at least one predecessor");

  VarLocSet &ILS = getVarLocsInMBB(&MBB, InLocs);
  bool Changed = false;
  if (ILS != InLocsT) {
    ILS = InLocsT;
    Changed = true;
  }

  return Changed;
}

void VarLocBasedLDV::flushPendingLocs(VarLocInMBB &PendingInLocs,
                                      VarLocMap &VarLocIDs) {
  // PendingInLocs records all locations propagated into blocks, which have
  // not had DBG_VALUE insts created. Go through and create those insts now.
  for (auto &Iter : PendingInLocs) {
    // Map is keyed on a constant pointer, unwrap it so we can insert insts.
    auto &MBB = const_cast<MachineBasicBlock &>(*Iter.first);
    VarLocSet &Pending = *Iter.second.get();

    SmallVector<VarLoc, 32> VarLocs;
    collectAllVarLocs(VarLocs, Pending, VarLocIDs);

    for (VarLoc DiffIt : VarLocs) {
      // The ID location is live-in to MBB -- work out what kind of machine
      // location it is and create a DBG_VALUE.
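      // Entry value backups are bookkeeping only and are materialized lazily
      // by emitEntryValues, so they are not flushed as live-in DBG_VALUEs.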
      if (DiffIt.isEntryBackupLoc())
        continue;
      MachineInstr *MI = DiffIt.BuildDbgValue(*MBB.getParent());
      MBB.insert(MBB.instr_begin(), MI);

      (void)MI;
      LLVM_DEBUG(dbgs() << "Inserted: "; MI->dump(););
    }
  }
}

bool VarLocBasedLDV::isEntryValueCandidate(
    const MachineInstr &MI, const DefinedRegsSet &DefinedRegs) const {
  assert(MI.isDebugValue() && "This must be DBG_VALUE.");

  // TODO: Add support for local variables that are expressed in terms of
  // parameters' entry values.
  // TODO: Add support for modified arguments that can be expressed
  // by using their entry values.
  auto *DIVar = MI.getDebugVariable();
  if (!DIVar->isParameter())
    return false;

  // Do not consider parameters that belong to an inlined function.
  if (MI.getDebugLoc()->getInlinedAt())
    return false;

  // Only consider parameters that are described using registers. Parameters
  // that are passed on the stack are not yet supported, so ignore debug
  // values that are described by the frame or stack pointer.
  if (!isRegOtherThanSPAndFP(MI.getDebugOperand(0), MI, TRI))
    return false;

  // If a parameter's value has been propagated from the caller, then the
  // parameter's DBG_VALUE may be described using a register defined by some
  // instruction in the entry block, in which case we shouldn't create an
  // entry value.
  if (DefinedRegs.count(MI.getDebugOperand(0).getReg()))
    return false;

  // TODO: Add support for parameters that have a pre-existing debug expression
  // (e.g. fragments).
  if (MI.getDebugExpression()->getNumElements() > 0)
    return false;

  return true;
}

/// Collect all register defines (including aliases) for the given instruction.
static void collectRegDefs(const MachineInstr &MI, DefinedRegsSet &Regs,
                           const TargetRegisterInfo *TRI) {
  for (const MachineOperand &MO : MI.operands())
    if (MO.isReg() && MO.isDef() && MO.getReg())
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Regs.insert(*AI);
}

/// This routine records the entry values of function parameters. The values
/// could be used as backup values. If we lose track of some unmodified
/// parameters, the backup values will be used as primary locations.
void VarLocBasedLDV::recordEntryValue(const MachineInstr &MI,
                                      const DefinedRegsSet &DefinedRegs,
                                      OpenRangesSet &OpenRanges,
                                      VarLocMap &VarLocIDs) {
  if (TPC) {
    auto &TM = TPC->getTM<TargetMachine>();
    if (!TM.Options.ShouldEmitDebugEntryValues())
      return;
  }

  DebugVariable V(MI.getDebugVariable(), MI.getDebugExpression(),
                  MI.getDebugLoc()->getInlinedAt());

  if (!isEntryValueCandidate(MI, DefinedRegs) ||
      OpenRanges.getEntryValueBackup(V))
    return;

  LLVM_DEBUG(dbgs() << "Creating the backup entry location: "; MI.dump(););

  // Create the entry value and use it as a backup location; it remains valid
  // until the parameter is modified.
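  // For an initially empty expression this typically yields
  // !DIExpression(DW_OP_LLVM_entry_value, 1).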
  DIExpression *NewExpr =
      DIExpression::prepend(MI.getDebugExpression(), DIExpression::EntryValue);
  VarLoc EntryValLocAsBackup = VarLoc::CreateEntryBackupLoc(MI, LS, NewExpr);
  LocIndices EntryValLocIDs = VarLocIDs.insert(EntryValLocAsBackup);
  OpenRanges.insert(EntryValLocIDs, EntryValLocAsBackup);
}

/// Calculate the liveness information for the given machine function and
/// extend ranges across basic blocks.
bool VarLocBasedLDV::ExtendRanges(MachineFunction &MF, TargetPassConfig *TPC,
                                  unsigned InputBBLimit,
                                  unsigned InputDbgValLimit) {
  LLVM_DEBUG(dbgs() << "\nDebug Range Extension\n");

  if (!MF.getFunction().getSubprogram())
    // VarLocBasedLDV will already have removed all DBG_VALUEs.
    return false;

  // Skip functions from NoDebug compilation units.
  if (MF.getFunction().getSubprogram()->getUnit()->getEmissionKind() ==
      DICompileUnit::NoDebug)
    return false;

  TRI = MF.getSubtarget().getRegisterInfo();
  TII = MF.getSubtarget().getInstrInfo();
  TFI = MF.getSubtarget().getFrameLowering();
  TFI->getCalleeSaves(MF, CalleeSavedRegs);
  this->TPC = TPC;
  LS.initialize(MF);

  bool Changed = false;
  bool OLChanged = false;
  bool MBBJoined = false;

  VarLocMap VarLocIDs;         // Map VarLoc<>unique ID for use in bitvectors.
  OverlapMap OverlapFragments; // Map of overlapping variable fragments.
  OpenRangesSet OpenRanges(Alloc, OverlapFragments);
                               // Ranges that are open until end of bb.
  VarLocInMBB OutLocs;         // Ranges that exist beyond bb.
  VarLocInMBB InLocs;          // Ranges that are incoming after joining.
  TransferMap Transfers;       // DBG_VALUEs associated with transfers (such as
                               // spills, copies and restores).
  // Map responsible MI to attached Transfer emitted from Backup Entry Value.
  InstToEntryLocMap EntryValTransfers;
  // Map a Register to the last MI which clobbered it.
  RegDefToInstMap RegSetInstrs;

  VarToFragments SeenFragments;

  // Blocks which are artificial, i.e. blocks which exclusively contain
  // instructions without locations, or with line 0 locations.
  SmallPtrSet<const MachineBasicBlock *, 16> ArtificialBlocks;

  DenseMap<unsigned int, MachineBasicBlock *> OrderToBB;
  DenseMap<MachineBasicBlock *, unsigned int> BBToOrder;
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Worklist;
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Pending;

  // Set of register defines that are seen when traversing the entry block
  // looking for debug entry value candidates.
  DefinedRegsSet DefinedRegs;

  // Only for the entry MBB, collect DBG_VALUEs representing
  // function parameters in order to generate debug entry values for them.
  MachineBasicBlock &First_MBB = *(MF.begin());
  for (auto &MI : First_MBB) {
    collectRegDefs(MI, DefinedRegs, TRI);
    if (MI.isDebugValue())
      recordEntryValue(MI, DefinedRegs, OpenRanges, VarLocIDs);
  }

  // Initialize per-block structures and scan for fragment overlaps.
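  // Building the overlap map up-front lets the dataflow stage check "which
  // known fragments overlap this one?" with a map lookup instead of
  // re-deriving it for every DBG_VALUE.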
  for (auto &MBB : MF)
    for (auto &MI : MBB)
      if (MI.isDebugValue())
        accumulateFragmentMap(MI, SeenFragments, OverlapFragments);

  auto hasNonArtificialLocation = [](const MachineInstr &MI) -> bool {
    if (const DebugLoc &DL = MI.getDebugLoc())
      return DL.getLine() != 0;
    return false;
  };
  for (auto &MBB : MF)
    if (none_of(MBB.instrs(), hasNonArtificialLocation))
      ArtificialBlocks.insert(&MBB);

  LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
                              "OutLocs after initialization", dbgs()));

  ReversePostOrderTraversal<MachineFunction *> RPOT(&MF);
  unsigned int RPONumber = 0;
  for (MachineBasicBlock *MBB : RPOT) {
    OrderToBB[RPONumber] = MBB;
    BBToOrder[MBB] = RPONumber;
    Worklist.push(RPONumber);
    ++RPONumber;
  }

  if (RPONumber > InputBBLimit) {
    unsigned NumInputDbgValues = 0;
    for (auto &MBB : MF)
      for (auto &MI : MBB)
        if (MI.isDebugValue())
          ++NumInputDbgValues;
    if (NumInputDbgValues > InputDbgValLimit) {
      LLVM_DEBUG(dbgs() << "Disabling VarLocBasedLDV: " << MF.getName()
                        << " has " << RPONumber << " basic blocks and "
                        << NumInputDbgValues
                        << " input DBG_VALUEs, exceeding limits.\n");
      return false;
    }
  }

  // This is a standard "intersection of predecessor outs" dataflow problem.
  // To solve it, we perform join() and process() using the two worklist method
  // until the ranges converge.
  // Ranges have converged when both worklists are empty.
  SmallPtrSet<const MachineBasicBlock *, 16> Visited;
  while (!Worklist.empty() || !Pending.empty()) {
    // We track what is on the pending worklist to avoid inserting the same
    // thing twice. We could avoid this with a custom priority queue, but this
    // is probably not worth it.
    SmallPtrSet<MachineBasicBlock *, 16> OnPending;
    LLVM_DEBUG(dbgs() << "Processing Worklist\n");
    while (!Worklist.empty()) {
      MachineBasicBlock *MBB = OrderToBB[Worklist.top()];
      Worklist.pop();
      MBBJoined = join(*MBB, OutLocs, InLocs, VarLocIDs, Visited,
                       ArtificialBlocks);
      MBBJoined |= Visited.insert(MBB).second;
      if (MBBJoined) {
        MBBJoined = false;
        Changed = true;
        // Now that we have started to extend ranges across BBs we need to
        // examine spill, copy and restore instructions to see whether they
        // operate with registers that correspond to user variables.
        // First load any pending inlocs.
        OpenRanges.insertFromLocSet(getVarLocsInMBB(MBB, InLocs), VarLocIDs);
        LastNonDbgMI = nullptr;
        RegSetInstrs.clear();
        for (auto &MI : *MBB)
          process(MI, OpenRanges, VarLocIDs, Transfers, EntryValTransfers,
                  RegSetInstrs);
        OLChanged |= transferTerminator(MBB, OpenRanges, OutLocs, VarLocIDs);

        LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
                                    "OutLocs after propagating", dbgs()));
        LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs,
                                    "InLocs after propagating", dbgs()));

        if (OLChanged) {
          OLChanged = false;
          for (auto s : MBB->successors())
            if (OnPending.insert(s).second) {
              Pending.push(BBToOrder[s]);
            }
        }
      }
    }
    Worklist.swap(Pending);
    // At this point, Pending must be empty, since it was just swapped with the
    // (now empty) Worklist.
    assert(Pending.empty() && "Pending should be empty");
  }

  // Add any DBG_VALUE instructions created by location transfers.
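  // These were only recorded during process(); inserting them now avoids
  // mutating the instruction stream while it is being iterated.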
  for (auto &TR : Transfers) {
    assert(!TR.TransferInst->isTerminator() &&
           "Cannot insert DBG_VALUE after terminator");
    MachineBasicBlock *MBB = TR.TransferInst->getParent();
    const VarLoc &VL = VarLocIDs[TR.LocationID];
    MachineInstr *MI = VL.BuildDbgValue(MF);
    MBB->insertAfterBundle(TR.TransferInst->getIterator(), MI);
  }
  Transfers.clear();

  // Add DBG_VALUEs created using Backup Entry Value location.
  for (auto &TR : EntryValTransfers) {
    MachineInstr *TRInst = const_cast<MachineInstr *>(TR.first);
    assert(!TRInst->isTerminator() &&
           "Cannot insert DBG_VALUE after terminator");
    MachineBasicBlock *MBB = TRInst->getParent();
    const VarLoc &VL = VarLocIDs[TR.second];
    MachineInstr *MI = VL.BuildDbgValue(MF);
    MBB->insertAfterBundle(TRInst->getIterator(), MI);
  }
  EntryValTransfers.clear();

  // Deferred inlocs will not have had any DBG_VALUE insts created; do
  // that now.
  flushPendingLocs(InLocs, VarLocIDs);

  LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs, "Final OutLocs", dbgs()));
  LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs, "Final InLocs", dbgs()));
  return Changed;
}

LDVImpl *llvm::makeVarLocBasedLiveDebugValues() {
  return new VarLocBasedLDV();
}