//===- StackMaps.cpp ------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackMaps.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Twine.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(3), cl::Hidden,
    cl::desc("Specify the stackmap encoding version (default = 3)"));

const char *StackMaps::WSMP = "Stack Maps: ";

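/// Return the immediate that follows the StackMaps::ConstantOp marker at
/// operand \p Idx of \p MI.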
static uint64_t getConstMetaVal(const MachineInstr &MI, unsigned Idx) {
  assert(MI.getOperand(Idx).isImm() &&
         MI.getOperand(Idx).getImm() == StackMaps::ConstantOp);
  const auto &MO = MI.getOperand(Idx + 1);
  assert(MO.isImm());
  return MO.getImm();
}

StackMapOpers::StackMapOpers(const MachineInstr *MI)
    : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}

PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

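/// Return the index of the next scratch register operand (an implicit,
/// early-clobber def) at or after \p StartIdx; when \p StartIdx is zero the
/// search starts at the first variable operand.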
unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

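/// Return the operand index of the number of GC-pointer-map entries, found by
/// skipping past the alloca records that precede it.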
unsigned StatepointOpers::getNumGcMapEntriesIdx() {
  // Take index of num of allocas and skip all alloca records.
  unsigned CurIdx = getNumAllocaIdx();
  unsigned NumAllocas = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumAllocas--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

unsigned StatepointOpers::getNumAllocaIdx() {
  // Take index of num of gc ptrs and skip all gc ptr records.
  unsigned CurIdx = getNumGCPtrIdx();
  unsigned NumGCPtrs = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumGCPtrs--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

unsigned StatepointOpers::getNumGCPtrIdx() {
  // Take index of num of deopt args and skip all deopt records.
  unsigned CurIdx = getNumDeoptArgsIdx();
  unsigned NumDeoptArgs = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumDeoptArgs--) {
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  }
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

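/// Return the operand index of the first GC pointer, or -1 if the statepoint
/// has no GC pointers.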
int StatepointOpers::getFirstGCPtrIdx() {
  unsigned NumGCPtrsIdx = getNumGCPtrIdx();
  unsigned NumGCPtrs = getConstMetaVal(*MI, NumGCPtrsIdx - 1);
  if (NumGCPtrs == 0)
    return -1;
  ++NumGCPtrsIdx; // skip <num gc ptrs>
  assert(NumGCPtrsIdx < MI->getNumOperands());
  return (int)NumGCPtrsIdx;
}

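/// Fill \p GCMap with (base, derived) pairs of logical GC pointer indices and
/// return the number of entries recorded.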
unsigned StatepointOpers::getGCPointerMap(
    SmallVectorImpl<std::pair<unsigned, unsigned>> &GCMap) {
  unsigned CurIdx = getNumGcMapEntriesIdx();
  unsigned GCMapSize = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  for (unsigned N = 0; N < GCMapSize; ++N) {
    unsigned B = MI->getOperand(CurIdx++).getImm();
    unsigned D = MI->getOperand(CurIdx++).getImm();
    GCMap.push_back(std::make_pair(B, D));
  }

  return GCMapSize;
}

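/// Return true if \p Reg does not appear among the statepoint's fixed use
/// operands (those before the variable area), so uses of it are foldable.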
bool StatepointOpers::isFoldableReg(Register Reg) const {
  unsigned FoldableAreaStart = getVarIdx();
  for (const MachineOperand &MO : MI->uses()) {
    if (MO.getOperandNo() >= FoldableAreaStart)
      break;
    if (MO.isReg() && MO.getReg() == Reg)
      return false;
  }
  return true;
}

bool StatepointOpers::isFoldableReg(const MachineInstr *MI, Register Reg) {
  if (MI->getOpcode() != TargetOpcode::STATEPOINT)
    return false;
  return StatepointOpers(MI).isFoldableReg(Reg);
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 3)
    llvm_unreachable("Unsupported stackmap version!");
}

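/// Advance past the meta operand record that starts at \p CurIdx (Direct,
/// Indirect, and Constant encodings span more than one operand) and return
/// the index of the next record.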
unsigned StackMaps::getNextMetaArgIdx(const MachineInstr *MI, unsigned CurIdx) {
  assert(CurIdx < MI->getNumOperands() && "Bad meta arg index");
  const auto &MO = MI->getOperand(CurIdx);
  if (MO.isImm()) {
    switch (MO.getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp:
      CurIdx += 2;
      break;
    case StackMaps::IndirectMemRefOp:
      CurIdx += 3;
      break;
    case StackMaps::ConstantOp:
      ++CurIdx;
      break;
    }
  }
  ++CurIdx;
  assert(CurIdx < MI->getNumOperands() && "points past operand list");
  return CurIdx;
}

/// Go up the super-register chain until we hit a valid dwarf register number.
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum;
  for (MCPhysReg SR : TRI->superregs_inclusive(Reg)) {
    RegNum = TRI->getDwarfRegNum(SR, false);
    if (RegNum >= 0)
      break;
  }

  assert(RegNum >= 0 && isUInt<16>(RegNum) && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

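/// Parse a single operand record starting at \p MOI, append the resulting
/// Location to \p Locs (or the live-out registers to \p LiveOuts for a
/// register-mask operand), and return an iterator past the consumed operands.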
MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      if (isInt<32>(Imm)) {
        Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      } else {
        // ConstPool is intentionally a MapVector of 'uint64_t's (as
        // opposed to 'int64_t's). We should never be in a situation
        // where we have to insert either the tombstone or the empty
        // keys into a map, and for a DenseMap<uint64_t, T> these are
        // (uint64_t)0 and (uint64_t)-1. They can be and are
        // represented using 32 bit integers.
        assert((uint64_t)Imm != DenseMapInfo<uint64_t>::getEmptyKey() &&
               (uint64_t)Imm != DenseMapInfo<uint64_t>::getTombstoneKey() &&
               "empty and tombstone keys should fit in 32 bits!");
        auto Result = ConstPool.insert(std::make_pair(Imm, Imm));
        Locs.emplace_back(Location::ConstantIndex, sizeof(int64_t), 0,
                          Result.first - ConstPool.begin());
      }
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    if (MOI->isUndef()) {
      // Record `undef` register as constant. Use same value as ISel uses.
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, 0xFEFEFEFE);
      return ++MOI;
    }

    assert(MOI->getReg().isPhysical() &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = *TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, TRI->getSpillSize(*RC),
                      DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}

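/// Print the accumulated callsite records in a human-readable form; used for
/// debug output.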
void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << " has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte 0"
         << ", .short " << Loc.Size << ", .short " << Loc.Reg << ", .short 0"
         << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << printReg(LO.Reg, TRI);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getSpillSize(*TRI->getMinimalPhysRegClass(Reg));
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> (Reg % 32)) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  llvm::sort(LiveOuts, [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
    // Only sort by the dwarf register number.
    return LHS.DwarfRegNum < RHS.DwarfRegNum;
  });

  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto *II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (I->Reg && TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  llvm::erase_if(LiveOuts, [](const LiveOutReg &LO) { return LO.Reg == 0; });

  return LiveOuts;
}

// See statepoint MI format description in StatepointOpers' class comment
// in include/llvm/CodeGen/StackMaps.h
void StackMaps::parseStatepointOpers(const MachineInstr &MI,
                                     MachineInstr::const_mop_iterator MOI,
                                     MachineInstr::const_mop_iterator MOE,
                                     LocationVec &Locations,
                                     LiveOutVec &LiveOuts) {
  LLVM_DEBUG(dbgs() << "record statepoint : " << MI << "\n");
  StatepointOpers SO(&MI);
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // CC
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Flags
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Num Deopts

  // Record Deopt Args.
  unsigned NumDeoptArgs = Locations.back().Offset;
  assert(Locations.back().Type == Location::Constant);
  assert(NumDeoptArgs == SO.getNumDeoptArgs());

  while (NumDeoptArgs--)
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Record gc base/derived pairs
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  assert(MOI->isImm());
  unsigned NumGCPointers = MOI->getImm();
  ++MOI;
  if (NumGCPointers) {
    // Map logical index of GC ptr to MI operand index.
    SmallVector<unsigned, 8> GCPtrIndices;
    unsigned GCPtrIdx = (unsigned)SO.getFirstGCPtrIdx();
    assert((int)GCPtrIdx != -1);
    assert(MOI - MI.operands_begin() == GCPtrIdx + 0LL);
    while (NumGCPointers--) {
      GCPtrIndices.push_back(GCPtrIdx);
      GCPtrIdx = StackMaps::getNextMetaArgIdx(&MI, GCPtrIdx);
    }

    SmallVector<std::pair<unsigned, unsigned>, 8> GCPairs;
    unsigned NumGCPairs = SO.getGCPointerMap(GCPairs);
    (void)NumGCPairs;
    LLVM_DEBUG(dbgs() << "NumGCPairs = " << NumGCPairs << "\n");

    auto MOB = MI.operands_begin();
    for (auto &P : GCPairs) {
      assert(P.first < GCPtrIndices.size() && "base pointer index not found");
      assert(P.second < GCPtrIndices.size() &&
             "derived pointer index not found");
      unsigned BaseIdx = GCPtrIndices[P.first];
      unsigned DerivedIdx = GCPtrIndices[P.second];
      LLVM_DEBUG(dbgs() << "Base : " << BaseIdx << " Derived : " << DerivedIdx
                        << "\n");
      (void)parseOperand(MOB + BaseIdx, MOE, Locations, LiveOuts);
      (void)parseOperand(MOB + DerivedIdx, MOE, Locations, LiveOuts);
    }

    MOI = MOB + GCPtrIdx;
  }

  // Record gc allocas
  assert(MOI < MOE);
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  unsigned NumAllocas = MOI->getImm();
  ++MOI;
  while (NumAllocas--) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
    assert(MOI < MOE);
  }
}

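/// Parse the operand range [MOI, MOE) of \p MI and append a callsite record
/// for it, keyed by \p ID and the label \p MILabel. If \p recordResult is
/// true, the instruction's first def is recorded as an additional location.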
void StackMaps::recordStackMapOpers(const MCSymbol &MILabel,
                                    const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {
  MCContext &OutContext = AP.OutStreamer->getContext();

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  if (MI.getOpcode() == TargetOpcode::STATEPOINT)
    parseStatepointOpers(MI, MOI, MOE, Locations, LiveOuts);
  else
    while (MOI != MOE)
      MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(&MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function and update callsite count.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->hasStackRealignment(*(AP.MF));
  uint64_t FrameSize = HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();

  auto CurrentIt = FnInfos.find(AP.CurrentFnSym);
  if (CurrentIt != FnInfos.end())
    CurrentIt->second.RecordCount++;
  else
    FnInfos.insert(std::make_pair(AP.CurrentFnSym, FunctionInfo(FrameSize)));
}

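/// Record a STACKMAP pseudo-instruction: all operands past the fixed ID and
/// shadow-bytes operands are emitted as locations.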
void StackMaps::recordStackMap(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(L, MI, ID, std::next(MI.operands_begin(),
                                           opers.getVarIdx()),
                      MI.operands_end());
}

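/// Record a PATCHPOINT pseudo-instruction: operands from the stack map start
/// index onward are emitted as locations, and for anyregcc patchpoints with a
/// def the result register is recorded as well.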
void StackMaps::recordPatchPoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getID();
  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(L, MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getNumCallArgs();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

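/// Record a STATEPOINT pseudo-instruction: all variable operands (deopt args,
/// gc pointers, and allocas) are emitted as locations for this callsite.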
void StackMaps::recordStatepoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(L, MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 3)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.emitIntValue(StackMapVersion, 1); // Version.
  OS.emitIntValue(0, 1);               // Reserved.
  OS.emitInt16(0);                     // Reserved.

  // Num functions.
  LLVM_DEBUG(dbgs() << WSMP << "#functions = " << FnInfos.size() << '\n');
  OS.emitInt32(FnInfos.size());
  // Num constants.
  LLVM_DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.emitInt32(ConstPool.size());
  // Num callsites.
  LLVM_DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.emitInt32(CSInfos.size());
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
///   uint64 : Record Count
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  LLVM_DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnInfos) {
    LLVM_DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                      << " frame size: " << FR.second.StackSize
                      << " callsite count: " << FR.second.RecordCount << '\n');
    OS.emitSymbolValue(FR.first, 8);
    OS.emitIntValue(FR.second.StackSize, 8);
    OS.emitIntValue(FR.second.RecordCount, 8);
  }
}

/// Emit the constant pool.
///
/// int64 : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  LLVM_DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    LLVM_DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.emitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Size in Bytes
///     uint16 : Dwarf RegNum
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  LLVM_DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.emitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.emitValue(CSI.CSOffsetExpr, 4);
      OS.emitInt16(0); // Reserved.
      OS.emitInt16(0); // 0 locations.
      OS.emitInt16(0); // padding.
      OS.emitInt16(0); // 0 live-out registers.
      OS.emitInt32(0); // padding.
      continue;
    }

    OS.emitIntValue(CSI.ID, 8);
    OS.emitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.emitInt16(0);
    OS.emitInt16(CSLocs.size());

    for (const auto &Loc : CSLocs) {
      OS.emitIntValue(Loc.Type, 1);
      OS.emitIntValue(0, 1); // Reserved
      OS.emitInt16(Loc.Size);
      OS.emitInt16(Loc.Reg);
      OS.emitInt16(0); // Reserved
      OS.emitInt32(Loc.Offset);
    }

    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(Align(8));

    // Num live-out registers and padding to align to 4 byte.
    OS.emitInt16(0);
    OS.emitInt16(LiveOuts.size());

    for (const auto &LO : LiveOuts) {
      OS.emitInt16(LO.DwarfRegNum);
      OS.emitIntValue(0, 1);
      OS.emitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(Align(8));
  }
}

/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnInfos.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.switchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.emitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  LLVM_DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.addBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}
