//===- StackMaps.cpp ------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackMaps.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Twine.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(3), cl::Hidden,
    cl::desc("Specify the stackmap encoding version (default = 3)"));

const char *StackMaps::WSMP = "Stack Maps: ";

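/// Return the constant value of the meta operand at \p Idx + 1, asserting that
/// the operand at \p Idx is the StackMaps::ConstantOp marker that precedes it.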
static uint64_t getConstMetaVal(const MachineInstr &MI, unsigned Idx) {
  assert(MI.getOperand(Idx).isImm() &&
         MI.getOperand(Idx).getImm() == StackMaps::ConstantOp);
  const auto &MO = MI.getOperand(Idx + 1);
  assert(MO.isImm());
  return MO.getImm();
}

StackMapOpers::StackMapOpers(const MachineInstr *MI)
  : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}

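/// A patchpoint carries at most one explicit result register; HasDef records
/// whether such an explicit, non-implicit def is present as operand 0.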
PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

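/// Return the index of the next scratch register operand (an implicit,
/// early-clobber def) at or after \p StartIdx. A \p StartIdx of zero starts
/// the search at the first variable operand.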
unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

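/// Return the operand index of the <num gc map entries> operand, found by
/// skipping over all alloca records.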
unsigned StatepointOpers::getNumGcMapEntriesIdx() {
  // Take the index of the number of allocas and skip all alloca records.
  unsigned CurIdx = getNumAllocaIdx();
  unsigned NumAllocas = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumAllocas--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

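/// Return the operand index of the <num allocas> operand, found by skipping
/// over all gc pointer records.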
unsigned StatepointOpers::getNumAllocaIdx() {
  // Take the index of the number of gc pointers and skip all gc pointer
  // records.
  unsigned CurIdx = getNumGCPtrIdx();
  unsigned NumGCPtrs = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumGCPtrs--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

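/// Return the operand index of the <num gc pointers> operand, found by
/// skipping over all deopt records.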
unsigned StatepointOpers::getNumGCPtrIdx() {
  // Take the index of the number of deopt args and skip all deopt records.
  unsigned CurIdx = getNumDeoptArgsIdx();
  unsigned NumDeoptArgs = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumDeoptArgs--) {
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  }
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

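/// Return the operand index of the first gc pointer record, or -1 if the
/// statepoint has no gc pointers.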
int StatepointOpers::getFirstGCPtrIdx() {
  unsigned NumGCPtrsIdx = getNumGCPtrIdx();
  unsigned NumGCPtrs = getConstMetaVal(*MI, NumGCPtrsIdx - 1);
  if (NumGCPtrs == 0)
    return -1;
  ++NumGCPtrsIdx; // skip <num gc ptrs>
  assert(NumGCPtrsIdx < MI->getNumOperands());
  return (int)NumGCPtrsIdx;
}

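/// Fill \p GCMap with the (base index, derived index) pairs recorded in the
/// statepoint's gc pointer map and return the number of entries.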
unsigned StatepointOpers::getGCPointerMap(
    SmallVectorImpl<std::pair<unsigned, unsigned>> &GCMap) {
  unsigned CurIdx = getNumGcMapEntriesIdx();
  unsigned GCMapSize = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  for (unsigned N = 0; N < GCMapSize; ++N) {
    unsigned B = MI->getOperand(CurIdx++).getImm();
    unsigned D = MI->getOperand(CurIdx++).getImm();
    GCMap.push_back(std::make_pair(B, D));
  }

  return GCMapSize;
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 3)
    llvm_unreachable("Unsupported stackmap version!");
}

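/// Return the index of the meta argument that follows the one starting at
/// \p CurIdx, accounting for the extra operands consumed by the Direct,
/// Indirect and Constant encodings.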
unsigned StackMaps::getNextMetaArgIdx(const MachineInstr *MI, unsigned CurIdx) {
  assert(CurIdx < MI->getNumOperands() && "Bad meta arg index");
  const auto &MO = MI->getOperand(CurIdx);
  if (MO.isImm()) {
    switch (MO.getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp:
      CurIdx += 2;
      break;
    case StackMaps::IndirectMemRefOp:
      CurIdx += 3;
      break;
    case StackMaps::ConstantOp:
      ++CurIdx;
      break;
    }
  }
  ++CurIdx;
  assert(CurIdx < MI->getNumOperands() && "points past operand list");
  return CurIdx;
}

/// Go up the super-register chain until we hit a valid dwarf register number.
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum = TRI->getDwarfRegNum(Reg, false);
  for (MCSuperRegIterator SR(Reg, TRI); SR.isValid() && RegNum < 0; ++SR)
    RegNum = TRI->getDwarfRegNum(*SR, false);

  assert(RegNum >= 0 && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

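/// Parse a single meta operand starting at \p MOI: Direct/Indirect memory
/// references, constants and registers are appended to \p Locs, while a
/// register live-out mask is expanded into \p LiveOuts. Returns an iterator
/// past the operands consumed.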
MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) const {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    assert(Register::isPhysicalRegister(MOI->getReg()) &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = *TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, TRI->getSpillSize(*RC),
                      DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}

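/// Print the recorded call sites, their locations and their live-out registers
/// in a human-readable form for debugging.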
void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << "  has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte 0"
         << ", .short " << Loc.Size << ", .short " << Loc.Reg << ", .short 0"
         << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << printReg(LO.Reg, TRI);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getSpillSize(*TRI->getMinimalPhysRegClass(Reg));
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> (Reg % 32)) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  llvm::sort(LiveOuts, [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
    // Only sort by the dwarf register number.
    return LHS.DwarfRegNum < RHS.DwarfRegNum;
  });

  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  llvm::erase_if(LiveOuts, [](const LiveOutReg &LO) { return LO.Reg == 0; });

  return LiveOuts;
}

// See statepoint MI format description in StatepointOpers' class comment
// in include/llvm/CodeGen/StackMaps.h
void StackMaps::parseStatepointOpers(const MachineInstr &MI,
                                     MachineInstr::const_mop_iterator MOI,
                                     MachineInstr::const_mop_iterator MOE,
                                     LocationVec &Locations,
                                     LiveOutVec &LiveOuts) {
  LLVM_DEBUG(dbgs() << "record statepoint : " << MI << "\n");
  StatepointOpers SO(&MI);
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // CC
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Flags
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Num Deopts

  // Record Deopt Args.
  unsigned NumDeoptArgs = Locations.back().Offset;
  assert(Locations.back().Type == Location::Constant);
  assert(NumDeoptArgs == SO.getNumDeoptArgs());

  while (NumDeoptArgs--)
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Record gc base/derived pairs
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  assert(MOI->isImm());
  unsigned NumGCPointers = MOI->getImm();
  ++MOI;
  if (NumGCPointers) {
    // Map logical index of GC ptr to MI operand index.
    SmallVector<unsigned, 8> GCPtrIndices;
    unsigned GCPtrIdx = (unsigned)SO.getFirstGCPtrIdx();
    assert((int)GCPtrIdx != -1);
    assert(MOI - MI.operands_begin() == GCPtrIdx + 0LL);
    while (NumGCPointers--) {
      GCPtrIndices.push_back(GCPtrIdx);
      GCPtrIdx = StackMaps::getNextMetaArgIdx(&MI, GCPtrIdx);
    }

    SmallVector<std::pair<unsigned, unsigned>, 8> GCPairs;
    unsigned NumGCPairs = SO.getGCPointerMap(GCPairs);
    (void)NumGCPairs;
    LLVM_DEBUG(dbgs() << "NumGCPairs = " << NumGCPairs << "\n");

    auto MOB = MI.operands_begin();
    for (auto &P : GCPairs) {
      assert(P.first < GCPtrIndices.size() && "base pointer index not found");
      assert(P.second < GCPtrIndices.size() &&
             "derived pointer index not found");
      unsigned BaseIdx = GCPtrIndices[P.first];
      unsigned DerivedIdx = GCPtrIndices[P.second];
      LLVM_DEBUG(dbgs() << "Base : " << BaseIdx << " Derived : " << DerivedIdx
                        << "\n");
      (void)parseOperand(MOB + BaseIdx, MOE, Locations, LiveOuts);
      (void)parseOperand(MOB + DerivedIdx, MOE, Locations, LiveOuts);
    }

    MOI = MOB + GCPtrIdx;
  }

  // Record gc allocas
  assert(MOI < MOE);
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  unsigned NumAllocas = MOI->getImm();
  ++MOI;
  while (NumAllocas--) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
    assert(MOI < MOE);
  }
}

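/// Record the locations and live-outs for a stackmap, patchpoint or statepoint
/// intrinsic whose meta operands start at \p MOI, together with the call-site
/// offset derived from \p MILabel, and update the per-function record count.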
void StackMaps::recordStackMapOpers(const MCSymbol &MILabel,
                                    const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {
  MCContext &OutContext = AP.OutStreamer->getContext();

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  if (MI.getOpcode() == TargetOpcode::STATEPOINT)
    parseStatepointOpers(MI, MOI, MOE, Locations, LiveOuts);
  else
    while (MOI != MOE)
      MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Move large constants into the constant pool.
  for (auto &Loc : Locations) {
    // Constants are encoded as sign-extended integers.
    // -1 is directly encoded as .long 0xFFFFFFFF with no constant pool.
    if (Loc.Type == Location::Constant && !isInt<32>(Loc.Offset)) {
      Loc.Type = Location::ConstantIndex;
      // ConstPool is intentionally a MapVector of 'uint64_t's (as
      // opposed to 'int64_t's).  We should never be in a situation
      // where we have to insert either the tombstone or the empty
      // keys into a map, and for a DenseMap<uint64_t, T> these are
      // (uint64_t)0 and (uint64_t)-1.  They can be and are
      // represented using 32 bit integers.
      assert((uint64_t)Loc.Offset != DenseMapInfo<uint64_t>::getEmptyKey() &&
             (uint64_t)Loc.Offset !=
                 DenseMapInfo<uint64_t>::getTombstoneKey() &&
             "empty and tombstone keys should fit in 32 bits!");
      auto Result = ConstPool.insert(std::make_pair(Loc.Offset, Loc.Offset));
      Loc.Offset = Result.first - ConstPool.begin();
    }
  }

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(&MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function and update callsite count.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->needsStackRealignment(*(AP.MF));
  uint64_t FrameSize = HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();

  auto CurrentIt = FnInfos.find(AP.CurrentFnSym);
  if (CurrentIt != FnInfos.end())
    CurrentIt->second.RecordCount++;
  else
    FnInfos.insert(std::make_pair(AP.CurrentFnSym, FunctionInfo(FrameSize)));
}

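/// Record the stack map data for a STACKMAP instruction; its meta operands
/// start at the variable argument index.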
void StackMaps::recordStackMap(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(L, MI, ID, std::next(MI.operands_begin(),
                                           opers.getVarIdx()),
                      MI.operands_end());
}

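/// Record the stack map data for a PATCHPOINT instruction; under the anyregcc
/// calling convention the explicit result register, if any, is recorded as
/// well.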
void StackMaps::recordPatchPoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getID();
  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(L, MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getNumCallArgs();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

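/// Record the stack map data for a STATEPOINT instruction, starting at its
/// first variable operand.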
void StackMaps::recordStatepoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(L, MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 3)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.emitIntValue(StackMapVersion, 1); // Version.
  OS.emitIntValue(0, 1);               // Reserved.
  OS.emitInt16(0);                     // Reserved.

  // Num functions.
  LLVM_DEBUG(dbgs() << WSMP << "#functions = " << FnInfos.size() << '\n');
  OS.emitInt32(FnInfos.size());
  // Num constants.
  LLVM_DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.emitInt32(ConstPool.size());
  // Num callsites.
  LLVM_DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.emitInt32(CSInfos.size());
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
///   uint64 : Record Count
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  LLVM_DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnInfos) {
    LLVM_DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                      << " frame size: " << FR.second.StackSize
                      << " callsite count: " << FR.second.RecordCount << '\n');
    OS.emitSymbolValue(FR.first, 8);
    OS.emitIntValue(FR.second.StackSize, 8);
    OS.emitIntValue(FR.second.RecordCount, 8);
  }
}

/// Emit the constant pool.
///
/// int64  : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  LLVM_DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    LLVM_DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.emitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Size in Bytes
///     uint16 : Dwarf RegNum
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  LLVM_DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.emitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.emitValue(CSI.CSOffsetExpr, 4);
      OS.emitInt16(0); // Reserved.
      OS.emitInt16(0); // 0 locations.
      OS.emitInt16(0); // padding.
      OS.emitInt16(0); // 0 live-out registers.
      OS.emitInt32(0); // padding.
      continue;
    }

    OS.emitIntValue(CSI.ID, 8);
    OS.emitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.emitInt16(0);
    OS.emitInt16(CSLocs.size());

    for (const auto &Loc : CSLocs) {
      OS.emitIntValue(Loc.Type, 1);
      OS.emitIntValue(0, 1); // Reserved
      OS.emitInt16(Loc.Size);
      OS.emitInt16(Loc.Reg);
      OS.emitInt16(0); // Reserved
      OS.emitInt32(Loc.Offset);
    }

    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(8);

    // Num live-out registers and padding to align to 4 byte.
    OS.emitInt16(0);
    OS.emitInt16(LiveOuts.size());

    for (const auto &LO : LiveOuts) {
      OS.emitInt16(LO.DwarfRegNum);
      OS.emitIntValue(0, 1);
      OS.emitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(8);
  }
}

/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnInfos.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.SwitchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.emitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  LLVM_DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.AddBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}