//===- StackMaps.cpp ------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackMaps.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Twine.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(3), cl::Hidden,
    cl::desc("Specify the stackmap encoding version (default = 3)"));

const char *StackMaps::WSMP = "Stack Maps: ";

StackMapOpers::StackMapOpers(const MachineInstr *MI)
    : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}

PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 3)
    llvm_unreachable("Unsupported stackmap version!");
}

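// Added note (not in the upstream source): the variable operands of STACKMAP
// and PATCHPOINT are either plain register operands or one of the marker
// immediates below followed by its payload (see parseOperand):
//   DirectMemRefOp,   <Reg>, <Offset>          - frame address
//   IndirectMemRefOp, <Size>, <Reg>, <Offset>  - spilled value
//   ConstantOp,       <Imm>                    - constant
// getNextMetaArgIdx advances over exactly one such logical operand.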
unsigned StackMaps::getNextMetaArgIdx(MachineInstr *MI, unsigned CurIdx) {
  assert(CurIdx < MI->getNumOperands() && "Bad meta arg index");
  const auto &MO = MI->getOperand(CurIdx);
  if (MO.isImm()) {
    switch (MO.getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp:
      CurIdx += 2;
      break;
    case StackMaps::IndirectMemRefOp:
      CurIdx += 3;
      break;
    case StackMaps::ConstantOp:
      ++CurIdx;
      break;
    }
  }
  ++CurIdx;
  assert(CurIdx < MI->getNumOperands() && "points past operand list");
  return CurIdx;
}

/// Go up the super-register chain until we hit a valid dwarf register number.
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum = TRI->getDwarfRegNum(Reg, false);
  for (MCSuperRegIterator SR(Reg, TRI); SR.isValid() && RegNum < 0; ++SR)
    RegNum = TRI->getDwarfRegNum(*SR, false);

  assert(RegNum >= 0 && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

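/// Added doc comment (not in the upstream source): parse a single variable
/// operand (or operand group) starting at MOI and append the resulting
/// Location to Locs; register-mask operands are instead expanded into
/// LiveOuts. Returns an iterator past the consumed operands.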
MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) const {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    assert(Register::isPhysicalRegister(MOI->getReg()) &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = *TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, TRI->getSpillSize(*RC),
                      DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}

void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << " has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte 0"
         << ", .short " << Loc.Size << ", .short " << Loc.Reg << ", .short 0"
         << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << printReg(LO.Reg, TRI);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getSpillSize(*TRI->getMinimalPhysRegClass(Reg));
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> (Reg % 32)) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  llvm::sort(LiveOuts, [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
    // Only sort by the dwarf register number.
    return LHS.DwarfRegNum < RHS.DwarfRegNum;
  });

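  // Added note (not in the upstream source): after the sort, entries form runs
  // that share a DwarfRegNum. Fold each run into its first entry: keep the
  // maximum spill size and the widest (super-)register, and zero out the Reg
  // field of the remaining entries so they can be erased below.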
  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  LiveOuts.erase(
      llvm::remove_if(LiveOuts,
                      [](const LiveOutReg &LO) { return LO.Reg == 0; }),
      LiveOuts.end());

  return LiveOuts;
}

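/// Added doc comment (not in the upstream source): record the locations and
/// live-outs of a stackmap-style instruction MI. MILabel is the label emitted
/// at the instruction, ID is the stackmap ID, and [MOI, MOE) is the operand
/// range to parse. If recordResult is set, the explicit register definition of
/// MI is recorded as the first location.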
void StackMaps::recordStackMapOpers(const MCSymbol &MILabel,
                                    const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {
  MCContext &OutContext = AP.OutStreamer->getContext();

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  while (MOI != MOE) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
  }

  // Move large constants into the constant pool.
  for (auto &Loc : Locations) {
    // Constants are encoded as sign-extended integers.
    // -1 is directly encoded as .long 0xFFFFFFFF with no constant pool.
    if (Loc.Type == Location::Constant && !isInt<32>(Loc.Offset)) {
      Loc.Type = Location::ConstantIndex;
      // ConstPool is intentionally a MapVector of 'uint64_t's (as
      // opposed to 'int64_t's). We should never be in a situation
      // where we have to insert either the tombstone or the empty
      // keys into a map, and for a DenseMap<uint64_t, T> these are
      // (uint64_t)0 and (uint64_t)-1. They can be and are
      // represented using 32 bit integers.
      assert((uint64_t)Loc.Offset != DenseMapInfo<uint64_t>::getEmptyKey() &&
             (uint64_t)Loc.Offset !=
                 DenseMapInfo<uint64_t>::getTombstoneKey() &&
             "empty and tombstone keys should fit in 32 bits!");
      auto Result = ConstPool.insert(std::make_pair(Loc.Offset, Loc.Offset));
      Loc.Offset = Result.first - ConstPool.begin();
    }
  }

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(&MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function and update callsite count.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->needsStackRealignment(*(AP.MF));
  uint64_t FrameSize = HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();

  auto CurrentIt = FnInfos.find(AP.CurrentFnSym);
  if (CurrentIt != FnInfos.end())
    CurrentIt->second.RecordCount++;
  else
    FnInfos.insert(std::make_pair(AP.CurrentFnSym, FunctionInfo(FrameSize)));
}

void StackMaps::recordStackMap(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(L, MI, ID, std::next(MI.operands_begin(),
                                           opers.getVarIdx()),
                      MI.operands_end());
}

void StackMaps::recordPatchPoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getID();
  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(L, MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getNumCallArgs();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

void StackMaps::recordStatepoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  // Record all the deopt and gc operands (they're contiguous and run from the
  // initial index to the end of the operand list)
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(L, MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

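// Added note (not in the upstream source): the serialized stackmap section
// consists of a Header followed by StkSizeRecord[NumFunctions],
// Constants[NumConstants] and StkMapRecord[NumRecords], emitted in that order
// by serializeToStackMapSection() via the helpers below.
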
/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 3)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.emitIntValue(StackMapVersion, 1); // Version.
  OS.emitIntValue(0, 1);               // Reserved.
  OS.emitInt16(0);                     // Reserved.

  // Num functions.
  LLVM_DEBUG(dbgs() << WSMP << "#functions = " << FnInfos.size() << '\n');
  OS.emitInt32(FnInfos.size());
  // Num constants.
  LLVM_DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.emitInt32(ConstPool.size());
  // Num callsites.
  LLVM_DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.emitInt32(CSInfos.size());
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
///   uint64 : Record Count
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  LLVM_DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnInfos) {
    LLVM_DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                      << " frame size: " << FR.second.StackSize
                      << " callsite count: " << FR.second.RecordCount << '\n');
    OS.emitSymbolValue(FR.first, 8);
    OS.emitIntValue(FR.second.StackSize, 8);
    OS.emitIntValue(FR.second.RecordCount, 8);
  }
}

/// Emit the constant pool.
///
/// int64 : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  LLVM_DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    LLVM_DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.emitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Size in Bytes
///     uint16 : Dwarf RegNum
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  LLVM_DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.emitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.emitValue(CSI.CSOffsetExpr, 4);
      OS.emitInt16(0); // Reserved.
      OS.emitInt16(0); // 0 locations.
      OS.emitInt16(0); // padding.
      OS.emitInt16(0); // 0 live-out registers.
      OS.emitInt32(0); // padding.
      continue;
    }

    OS.emitIntValue(CSI.ID, 8);
    OS.emitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.emitInt16(0);
    OS.emitInt16(CSLocs.size());

    for (const auto &Loc : CSLocs) {
      OS.emitIntValue(Loc.Type, 1);
      OS.emitIntValue(0, 1); // Reserved
      OS.emitInt16(Loc.Size);
      OS.emitInt16(Loc.Reg);
      OS.emitInt16(0); // Reserved
      OS.emitInt32(Loc.Offset);
    }

    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(8);

    // Num live-out registers and padding to align to 4 byte.
    OS.emitInt16(0);
    OS.emitInt16(LiveOuts.size());

    for (const auto &LO : LiveOuts) {
      OS.emitInt16(LO.DwarfRegNum);
      OS.emitIntValue(0, 1);
      OS.emitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(8);
  }
}

/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnInfos.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.SwitchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.emitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  LLVM_DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.AddBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}