//===- VarLocBasedImpl.cpp - Tracking Debug Value MIs with VarLoc class----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file VarLocBasedImpl.cpp
///
/// LiveDebugValues is an optimistic "available expressions" dataflow
/// algorithm. The set of expressions is the set of machine locations
/// (registers, spill slots, constants, and target indices) in which a variable
/// fragment might be located, qualified by a DIExpression and indirect-ness
/// flag, while each variable is identified by a DebugVariable object. The
/// availability of an expression begins when a DBG_VALUE instruction specifies
/// the location of a DebugVariable, and continues until that location is
/// clobbered or re-specified by a different DBG_VALUE for the same
/// DebugVariable.
///
/// The output of LiveDebugValues is additional DBG_VALUE instructions,
/// placed to extend variable locations as far as they're available. This file
/// and the VarLocBasedLDV class are an implementation that explicitly tracks
/// locations, using the VarLoc class.
///
/// The canonical "available expressions" problem doesn't have expression
/// clobbering; instead, when a variable is re-assigned, any expressions using
/// that variable get invalidated. LiveDebugValues can map onto "available
/// expressions" by having every register represented by a variable, which is
/// used in an expression that becomes available at a DBG_VALUE instruction.
/// When the register is clobbered, its variable is effectively reassigned, and
/// expressions computed from it become unavailable. A similar construct is
/// needed when a DebugVariable has its location re-specified, to invalidate
/// all other locations for that DebugVariable.
///
/// Using the dataflow analysis to compute the available expressions, we create
/// a DBG_VALUE at the beginning of each block where the expression is
/// live-in. This propagates variable locations into every basic block where
/// the location can be determined, rather than only having DBG_VALUEs in blocks
/// where locations are specified due to an assignment or some optimization.
/// Movements of values between registers and spill slots are also annotated
/// with DBG_VALUEs to track variable values between locations. All this allows
/// DbgEntityHistoryCalculator to focus on only the locations within individual
/// blocks, facilitating testing and improving modularity.
///
/// We follow an optimistic dataflow approach, with this lattice:
///
/// \verbatim
///                    ┬ "Unknown"
///                    |
///                    v
///                   True
///                    |
///                    v
///                   ⊥ False
/// \endverbatim With "True" signifying that the expression is available (and
/// thus a DebugVariable's location is the corresponding register), and
/// "False" signifying that the expression is unavailable. "Unknown"s never
/// survive to the end of the analysis (see below).
///
/// Formally, all DebugVariable locations that are live-out of a block are
/// initialized to \top. A block's live-in values take the meet of the lattice
/// values of every predecessor's live-outs, except for the entry block, where
/// all live-ins are \bot. The usual dataflow propagation occurs: the transfer
/// function for a block assigns an expression for a DebugVariable to be "True"
/// if a DBG_VALUE in the block specifies it; "False" if the location is
/// clobbered; or the live-in value if it is unaffected by the block. We
/// visit each block in reverse post order until a fixed point is reached. The
/// solution produced is maximal.
///
/// Intuitively, we start by assuming that every expression / variable location
/// is at least "True", and then propagate "False" from the entry block and any
/// clobbers until there are no more changes to make. This gives us an accurate
/// solution because all incorrect locations will have a "False" propagated into
/// them. It also gives us a solution that copes well with loops by assuming
/// that variable locations are live-through every loop, and then removing those
/// that are not through dataflow.
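///
/// As an illustrative sketch (the block numbers and register below are
/// hypothetical, not taken from any particular function), consider a single
/// variable "x" and a simple loop:
///
/// \verbatim
///   bb.0 (entry):  DBG_VALUE "x" in $rbx     live-out: True
///   bb.1 (header): preds = bb.0, bb.2        live-in:  meet(True, Unknown) = True
///   bb.2 (latch):  does not clobber $rbx     live-out: True
///   bb.3 (exit):   pred = bb.1               live-in:  True
/// \endverbatim
///
/// On the first visit to bb.1 the back edge from bb.2 still carries "Unknown",
/// so the meet keeps "True" and the location is assumed live through the loop.
/// If bb.2 instead clobbered $rbx, its live-out would become "False", and a
/// later visit to bb.1 would compute meet(True, False) = False, so no
/// DBG_VALUE for "x" would be inserted at bb.1 or bb.3.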
///
/// Within LiveDebugValues: each variable location is represented by a
/// VarLoc object that identifies the source variable, the set of
/// machine-locations that currently describe it (a single location for
/// DBG_VALUE or multiple for DBG_VALUE_LIST), and the DBG_VALUE inst that
/// specifies the location. Each VarLoc is indexed in the (function-scope) \p
/// VarLocMap, giving each VarLoc a set of unique indices, each of which
/// corresponds to one of the VarLoc's machine-locations and can be used to
/// lookup the VarLoc in the VarLocMap. Rather than operate directly on machine
/// locations, the dataflow analysis in this pass identifies locations by their
/// indices in the VarLocMap, meaning all the variable locations in a block can
/// be described by a sparse vector of VarLocMap indices.
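///
/// For example (an illustrative sketch; the variable names are hypothetical):
/// if the VarLoc for "x" was assigned raw index I1 and the VarLoc for "y" raw
/// index I2, a block whose live-in set is {I1, I2} records both locations
/// without materializing either VarLoc object, and the meet over predecessors
/// becomes an intersection of these sparse bit vectors.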
///
/// All the storage for the dataflow analysis is local to the ExtendRanges
/// method and passed down to helper methods. "OutLocs" and "InLocs" record the
/// in and out lattice values for each block. "OpenRanges" maintains a list of
/// variable locations and, with the "process" method, evaluates the transfer
/// function of each block. "flushPendingLocs" installs debug value instructions
/// for each live-in location at the start of blocks, while "Transfers" records
/// transfers of values between machine-locations.
///
/// We avoid explicitly representing the "Unknown" (\top) lattice value in the
/// implementation. Instead, unvisited blocks implicitly have all lattice
/// values set as "Unknown". After being visited, there will be a path back to
/// the entry block where the lattice value is "False", and as the transfer
/// function cannot make new "Unknown" locations, there are no scenarios where
/// a block can have an "Unknown" location after being visited. Similarly, we
/// don't enumerate all possible variable locations before exploring the
/// function: when a new location is discovered, all blocks previously explored
/// were implicitly "False" but unrecorded, and become explicitly "False" when
/// a new VarLoc is created with its bit not set in predecessor InLocs or
/// OutLocs.
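///
/// To make the last point concrete (an illustrative sketch; the variable name
/// and block numbers are hypothetical): if the first DBG_VALUE for "y" appears
/// while processing bb.4, then bb.0-bb.3 simply have no bits set for y's new
/// VarLoc indices. The join reads those missing bits as "False", so no
/// re-initialization of previously computed InLocs or OutLocs is needed.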
///
//===----------------------------------------------------------------------===//

113#include "LiveDebugValues.h"
114
115#include "llvm/ADT/CoalescingBitVector.h"
116#include "llvm/ADT/DenseMap.h"
117#include "llvm/ADT/PostOrderIterator.h"
118#include "llvm/ADT/SmallPtrSet.h"
119#include "llvm/ADT/SmallSet.h"
120#include "llvm/ADT/SmallVector.h"
121#include "llvm/ADT/Statistic.h"
122#include "llvm/BinaryFormat/Dwarf.h"
123#include "llvm/CodeGen/LexicalScopes.h"
124#include "llvm/CodeGen/MachineBasicBlock.h"
125#include "llvm/CodeGen/MachineFunction.h"
126#include "llvm/CodeGen/MachineInstr.h"
127#include "llvm/CodeGen/MachineInstrBuilder.h"
128#include "llvm/CodeGen/MachineMemOperand.h"
129#include "llvm/CodeGen/MachineOperand.h"
130#include "llvm/CodeGen/PseudoSourceValue.h"
131#include "llvm/CodeGen/TargetFrameLowering.h"
132#include "llvm/CodeGen/TargetInstrInfo.h"
133#include "llvm/CodeGen/TargetLowering.h"
134#include "llvm/CodeGen/TargetPassConfig.h"
135#include "llvm/CodeGen/TargetRegisterInfo.h"
136#include "llvm/CodeGen/TargetSubtargetInfo.h"
137#include "llvm/Config/llvm-config.h"
138#include "llvm/IR/DebugInfoMetadata.h"
139#include "llvm/IR/DebugLoc.h"
140#include "llvm/IR/Function.h"
141#include "llvm/MC/MCRegisterInfo.h"
142#include "llvm/Support/Casting.h"
143#include "llvm/Support/Debug.h"
144#include "llvm/Support/TypeSize.h"
145#include "llvm/Support/raw_ostream.h"
146#include "llvm/Target/TargetMachine.h"
147#include <algorithm>
148#include <cassert>
149#include <cstdint>
150#include <functional>
151#include <map>
152#include <optional>
153#include <queue>
154#include <tuple>
155#include <utility>
156#include <vector>
157
158using namespace llvm;
159
160#define DEBUG_TYPE "livedebugvalues"
161
162STATISTIC(NumInserted, "Number of DBG_VALUE instructions inserted");
163
/// Return true if \p Op is a register other than the stack pointer or the
/// frame pointer, otherwise return false. This is used to avoid basing the
/// debug entry values on the registers, since we do not support it at the
/// moment.
167static bool isRegOtherThanSPAndFP(const MachineOperand &Op,
168 const MachineInstr &MI,
169 const TargetRegisterInfo *TRI) {
170 if (!Op.isReg())
171 return false;
172
173 const MachineFunction *MF = MI.getParent()->getParent();
174 const TargetLowering *TLI = MF->getSubtarget().getTargetLowering();
175 Register SP = TLI->getStackPointerRegisterToSaveRestore();
176 Register FP = TRI->getFrameRegister(MF: *MF);
177 Register Reg = Op.getReg();
178
179 return Reg && Reg != SP && Reg != FP;
180}
181
182namespace {
183
184// Max out the number of statically allocated elements in DefinedRegsSet, as
185// this prevents fallback to std::set::count() operations.
186using DefinedRegsSet = SmallSet<Register, 32>;
187
188// The IDs in this set correspond to MachineLocs in VarLocs, as well as VarLocs
189// that represent Entry Values; every VarLoc in the set will also appear
190// exactly once at Location=0.
191// As a result, each VarLoc may appear more than once in this "set", but each
192// range corresponding to a Reg, SpillLoc, or EntryValue type will still be a
193// "true" set (i.e. each VarLoc may appear only once), and the range Location=0
194// is the set of all VarLocs.
195using VarLocSet = CoalescingBitVector<uint64_t>;
196
197/// A type-checked pair of {Register Location (or 0), Index}, used to index
198/// into a \ref VarLocMap. This can be efficiently converted to a 64-bit int
199/// for insertion into a \ref VarLocSet, and efficiently converted back. The
200/// type-checker helps ensure that the conversions aren't lossy.
201///
202/// Why encode a location /into/ the VarLocMap index? This makes it possible
203/// to find the open VarLocs killed by a register def very quickly. This is a
204/// performance-critical operation for LiveDebugValues.
205struct LocIndex {
206 using u32_location_t = uint32_t;
207 using u32_index_t = uint32_t;
208
209 u32_location_t Location; // Physical registers live in the range [1;2^30) (see
210 // \ref MCRegister), so we have plenty of range left
211 // here to encode non-register locations.
212 u32_index_t Index;
213
214 /// The location that has an entry for every VarLoc in the map.
215 static constexpr u32_location_t kUniversalLocation = 0;
216
217 /// The first location that is reserved for VarLocs with locations of kind
218 /// RegisterKind.
219 static constexpr u32_location_t kFirstRegLocation = 1;
220
221 /// The first location greater than 0 that is not reserved for VarLocs with
222 /// locations of kind RegisterKind.
223 static constexpr u32_location_t kFirstInvalidRegLocation = 1 << 30;
224
225 /// A special location reserved for VarLocs with locations of kind
226 /// SpillLocKind.
227 static constexpr u32_location_t kSpillLocation = kFirstInvalidRegLocation;
228
229 /// A special location reserved for VarLocs of kind EntryValueBackupKind and
230 /// EntryValueCopyBackupKind.
231 static constexpr u32_location_t kEntryValueBackupLocation =
232 kFirstInvalidRegLocation + 1;
233
234 /// A special location reserved for VarLocs with locations of kind
235 /// WasmLocKind.
  /// TODO: Placing all Wasm target index locations in this single
  /// kWasmLocation may cause slowdown in compilation time in very large
  /// functions. Consider giving each target index/offset pair its own
  /// u32_location_t if this becomes a problem.
240 static constexpr u32_location_t kWasmLocation = kFirstInvalidRegLocation + 2;
241
242 LocIndex(u32_location_t Location, u32_index_t Index)
243 : Location(Location), Index(Index) {}
244
245 uint64_t getAsRawInteger() const {
246 return (static_cast<uint64_t>(Location) << 32) | Index;
247 }
248
249 template<typename IntT> static LocIndex fromRawInteger(IntT ID) {
250 static_assert(std::is_unsigned_v<IntT> && sizeof(ID) == sizeof(uint64_t),
251 "Cannot convert raw integer to LocIndex");
252 return {static_cast<u32_location_t>(ID >> 32),
253 static_cast<u32_index_t>(ID)};
254 }
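
  // An illustrative example (the specific values are hypothetical): a VarLoc
  // stored at position 7 of the bucket for physical register 13 is identified
  // by LocIndex(13, 7). getAsRawInteger() encodes that as
  // (13ULL << 32) | 7 == 0xD'0000'0007, and fromRawInteger(0xD'0000'0007ULL)
  // recovers {Location = 13, Index = 7} for use as a key in a VarLocSet.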
255
256 /// Get the start of the interval reserved for VarLocs of kind RegisterKind
257 /// which reside in \p Reg. The end is at rawIndexForReg(Reg+1)-1.
258 static uint64_t rawIndexForReg(Register Reg) {
259 return LocIndex(Reg, 0).getAsRawInteger();
260 }
261
262 /// Return a range covering all set indices in the interval reserved for
263 /// \p Location in \p Set.
264 static auto indexRangeForLocation(const VarLocSet &Set,
265 u32_location_t Location) {
266 uint64_t Start = LocIndex(Location, 0).getAsRawInteger();
267 uint64_t End = LocIndex(Location + 1, 0).getAsRawInteger();
268 return Set.half_open_range(Start, End);
269 }
270};
271
272// Simple Set for storing all the VarLoc Indices at a Location bucket.
273using VarLocsInRange = SmallSet<LocIndex::u32_index_t, 32>;
274// Vector of all `LocIndex`s for a given VarLoc; the same Location should not
275// appear in any two of these, as each VarLoc appears at most once in any
276// Location bucket.
277using LocIndices = SmallVector<LocIndex, 2>;
278
279class VarLocBasedLDV : public LDVImpl {
280private:
281 const TargetRegisterInfo *TRI;
282 const TargetInstrInfo *TII;
283 const TargetFrameLowering *TFI;
284 TargetPassConfig *TPC;
285 BitVector CalleeSavedRegs;
286 LexicalScopes LS;
287 VarLocSet::Allocator Alloc;
288
289 const MachineInstr *LastNonDbgMI;
290
291 enum struct TransferKind { TransferCopy, TransferSpill, TransferRestore };
292
293 using FragmentInfo = DIExpression::FragmentInfo;
294 using OptFragmentInfo = std::optional<DIExpression::FragmentInfo>;
295
296 /// A pair of debug variable and value location.
297 struct VarLoc {
298 // The location at which a spilled variable resides. It consists of a
299 // register and an offset.
300 struct SpillLoc {
301 unsigned SpillBase;
302 StackOffset SpillOffset;
303 bool operator==(const SpillLoc &Other) const {
304 return SpillBase == Other.SpillBase && SpillOffset == Other.SpillOffset;
305 }
306 bool operator!=(const SpillLoc &Other) const {
307 return !(*this == Other);
308 }
309 };
310
311 // Target indices used for wasm-specific locations.
312 struct WasmLoc {
313 // One of TargetIndex values defined in WebAssembly.h. We deal with
314 // local-related TargetIndex in this analysis (TI_LOCAL and
      // TI_LOCAL_INDIRECT). Stack operands (TI_OPERAND_STACK) will be handled
      // separately by the WebAssemblyDebugFixup pass, and we don't associate
      // debug info with values in global operands (TI_GLOBAL_RELOC) at the
      // moment.
318 int Index;
319 int64_t Offset;
320 bool operator==(const WasmLoc &Other) const {
321 return Index == Other.Index && Offset == Other.Offset;
322 }
323 bool operator!=(const WasmLoc &Other) const { return !(*this == Other); }
324 };
325
326 /// Identity of the variable at this location.
327 const DebugVariable Var;
328
329 /// The expression applied to this location.
330 const DIExpression *Expr;
331
332 /// DBG_VALUE to clone var/expr information from if this location
333 /// is moved.
334 const MachineInstr &MI;
335
336 enum class MachineLocKind {
337 InvalidKind = 0,
338 RegisterKind,
339 SpillLocKind,
340 ImmediateKind,
341 WasmLocKind
342 };
343
344 enum class EntryValueLocKind {
345 NonEntryValueKind = 0,
346 EntryValueKind,
347 EntryValueBackupKind,
348 EntryValueCopyBackupKind
349 } EVKind = EntryValueLocKind::NonEntryValueKind;
350
351 /// The value location. Stored separately to avoid repeatedly
352 /// extracting it from MI.
353 union MachineLocValue {
354 uint64_t RegNo;
355 SpillLoc SpillLocation;
356 uint64_t Hash;
357 int64_t Immediate;
358 const ConstantFP *FPImm;
359 const ConstantInt *CImm;
360 WasmLoc WasmLocation;
361 MachineLocValue() : Hash(0) {}
362 };
363
    /// A single machine location; its Kind is either a register, spill
    /// location, or immediate value.
    /// If the VarLoc is an entry value (i.e. its EVKind is anything other
    /// than NonEntryValueKind), it will use only a single MachineLoc of
    /// RegisterKind.
368 struct MachineLoc {
369 MachineLocKind Kind;
370 MachineLocValue Value;
371 bool operator==(const MachineLoc &Other) const {
372 if (Kind != Other.Kind)
373 return false;
374 switch (Kind) {
375 case MachineLocKind::SpillLocKind:
376 return Value.SpillLocation == Other.Value.SpillLocation;
377 case MachineLocKind::WasmLocKind:
378 return Value.WasmLocation == Other.Value.WasmLocation;
379 case MachineLocKind::RegisterKind:
380 case MachineLocKind::ImmediateKind:
381 return Value.Hash == Other.Value.Hash;
382 default:
383 llvm_unreachable("Invalid kind");
384 }
385 }
386 bool operator<(const MachineLoc &Other) const {
387 switch (Kind) {
388 case MachineLocKind::SpillLocKind:
389 return std::make_tuple(
390 args: Kind, args: Value.SpillLocation.SpillBase,
391 args: Value.SpillLocation.SpillOffset.getFixed(),
392 args: Value.SpillLocation.SpillOffset.getScalable()) <
393 std::make_tuple(
394 args: Other.Kind, args: Other.Value.SpillLocation.SpillBase,
395 args: Other.Value.SpillLocation.SpillOffset.getFixed(),
396 args: Other.Value.SpillLocation.SpillOffset.getScalable());
397 case MachineLocKind::WasmLocKind:
398 return std::make_tuple(args: Kind, args: Value.WasmLocation.Index,
399 args: Value.WasmLocation.Offset) <
400 std::make_tuple(args: Other.Kind, args: Other.Value.WasmLocation.Index,
401 args: Other.Value.WasmLocation.Offset);
402 case MachineLocKind::RegisterKind:
403 case MachineLocKind::ImmediateKind:
404 return std::tie(args: Kind, args: Value.Hash) <
405 std::tie(args: Other.Kind, args: Other.Value.Hash);
406 default:
407 llvm_unreachable("Invalid kind");
408 }
409 }
410 };
411
412 /// The set of machine locations used to determine the variable's value, in
413 /// conjunction with Expr. Initially populated with MI's debug operands,
414 /// but may be transformed independently afterwards.
415 SmallVector<MachineLoc, 8> Locs;
416 /// Used to map the index of each location in Locs back to the index of its
417 /// original debug operand in MI. Used when multiple location operands are
418 /// coalesced and the original MI's operands need to be accessed while
419 /// emitting a debug value.
420 SmallVector<unsigned, 8> OrigLocMap;
421
422 VarLoc(const MachineInstr &MI)
423 : Var(MI.getDebugVariable(), MI.getDebugExpression(),
424 MI.getDebugLoc()->getInlinedAt()),
425 Expr(MI.getDebugExpression()), MI(MI) {
426 assert(MI.isDebugValue() && "not a DBG_VALUE");
427 assert((MI.isDebugValueList() || MI.getNumOperands() == 4) &&
428 "malformed DBG_VALUE");
429 for (const MachineOperand &Op : MI.debug_operands()) {
430 MachineLoc ML = GetLocForOp(Op);
431 auto It = find(Range&: Locs, Val: ML);
432 if (It == Locs.end()) {
433 Locs.push_back(Elt: ML);
434 OrigLocMap.push_back(Elt: MI.getDebugOperandIndex(Op: &Op));
435 } else {
436 // ML duplicates an element in Locs; replace references to Op
437 // with references to the duplicating element.
438 unsigned OpIdx = Locs.size();
439 unsigned DuplicatingIdx = std::distance(first: Locs.begin(), last: It);
440 Expr = DIExpression::replaceArg(Expr, OldArg: OpIdx, NewArg: DuplicatingIdx);
441 }
442 }
443
444 // We create the debug entry values from the factory functions rather
445 // than from this ctor.
446 assert(EVKind != EntryValueLocKind::EntryValueKind &&
447 !isEntryBackupLoc());
448 }
449
450 static MachineLoc GetLocForOp(const MachineOperand &Op) {
451 MachineLocKind Kind;
452 MachineLocValue Loc;
453 if (Op.isReg()) {
454 Kind = MachineLocKind::RegisterKind;
455 Loc.RegNo = Op.getReg();
456 } else if (Op.isImm()) {
457 Kind = MachineLocKind::ImmediateKind;
458 Loc.Immediate = Op.getImm();
459 } else if (Op.isFPImm()) {
460 Kind = MachineLocKind::ImmediateKind;
461 Loc.FPImm = Op.getFPImm();
462 } else if (Op.isCImm()) {
463 Kind = MachineLocKind::ImmediateKind;
464 Loc.CImm = Op.getCImm();
465 } else if (Op.isTargetIndex()) {
466 Kind = MachineLocKind::WasmLocKind;
467 Loc.WasmLocation = {.Index: Op.getIndex(), .Offset: Op.getOffset()};
468 } else
469 llvm_unreachable("Invalid Op kind for MachineLoc.");
470 return {.Kind: Kind, .Value: Loc};
471 }
472
473 /// Take the variable and machine-location in DBG_VALUE MI, and build an
474 /// entry location using the given expression.
475 static VarLoc CreateEntryLoc(const MachineInstr &MI,
476 const DIExpression *EntryExpr, Register Reg) {
477 VarLoc VL(MI);
478 assert(VL.Locs.size() == 1 &&
479 VL.Locs[0].Kind == MachineLocKind::RegisterKind);
480 VL.EVKind = EntryValueLocKind::EntryValueKind;
481 VL.Expr = EntryExpr;
482 VL.Locs[0].Value.RegNo = Reg;
483 return VL;
484 }
485
486 /// Take the variable and machine-location from the DBG_VALUE (from the
487 /// function entry), and build an entry value backup location. The backup
488 /// location will turn into the normal location if the backup is valid at
489 /// the time of the primary location clobbering.
490 static VarLoc CreateEntryBackupLoc(const MachineInstr &MI,
491 const DIExpression *EntryExpr) {
492 VarLoc VL(MI);
493 assert(VL.Locs.size() == 1 &&
494 VL.Locs[0].Kind == MachineLocKind::RegisterKind);
495 VL.EVKind = EntryValueLocKind::EntryValueBackupKind;
496 VL.Expr = EntryExpr;
497 return VL;
498 }
499
500 /// Take the variable and machine-location from the DBG_VALUE (from the
501 /// function entry), and build a copy of an entry value backup location by
502 /// setting the register location to NewReg.
503 static VarLoc CreateEntryCopyBackupLoc(const MachineInstr &MI,
504 const DIExpression *EntryExpr,
505 Register NewReg) {
506 VarLoc VL(MI);
507 assert(VL.Locs.size() == 1 &&
508 VL.Locs[0].Kind == MachineLocKind::RegisterKind);
509 VL.EVKind = EntryValueLocKind::EntryValueCopyBackupKind;
510 VL.Expr = EntryExpr;
511 VL.Locs[0].Value.RegNo = NewReg;
512 return VL;
513 }
514
515 /// Copy the register location in DBG_VALUE MI, updating the register to
516 /// be NewReg.
517 static VarLoc CreateCopyLoc(const VarLoc &OldVL, const MachineLoc &OldML,
518 Register NewReg) {
519 VarLoc VL = OldVL;
520 for (MachineLoc &ML : VL.Locs)
521 if (ML == OldML) {
522 ML.Kind = MachineLocKind::RegisterKind;
523 ML.Value.RegNo = NewReg;
524 return VL;
525 }
526 llvm_unreachable("Should have found OldML in new VarLoc.");
527 }
528
529 /// Take the variable described by DBG_VALUE* MI, and create a VarLoc
530 /// locating it in the specified spill location.
531 static VarLoc CreateSpillLoc(const VarLoc &OldVL, const MachineLoc &OldML,
532 unsigned SpillBase, StackOffset SpillOffset) {
533 VarLoc VL = OldVL;
534 for (MachineLoc &ML : VL.Locs)
535 if (ML == OldML) {
536 ML.Kind = MachineLocKind::SpillLocKind;
537 ML.Value.SpillLocation = {.SpillBase: SpillBase, .SpillOffset: SpillOffset};
538 return VL;
539 }
540 llvm_unreachable("Should have found OldML in new VarLoc.");
541 }
542
543 /// Create a DBG_VALUE representing this VarLoc in the given function.
544 /// Copies variable-specific information such as DILocalVariable and
545 /// inlining information from the original DBG_VALUE instruction, which may
546 /// have been several transfers ago.
547 MachineInstr *BuildDbgValue(MachineFunction &MF) const {
548 assert(!isEntryBackupLoc() &&
549 "Tried to produce DBG_VALUE for backup VarLoc");
550 const DebugLoc &DbgLoc = MI.getDebugLoc();
551 bool Indirect = MI.isIndirectDebugValue();
552 const auto &IID = MI.getDesc();
553 const DILocalVariable *Var = MI.getDebugVariable();
554 NumInserted++;
555
556 const DIExpression *DIExpr = Expr;
557 SmallVector<MachineOperand, 8> MOs;
558 for (unsigned I = 0, E = Locs.size(); I < E; ++I) {
559 MachineLocKind LocKind = Locs[I].Kind;
560 MachineLocValue Loc = Locs[I].Value;
561 const MachineOperand &Orig = MI.getDebugOperand(Index: OrigLocMap[I]);
562 switch (LocKind) {
563 case MachineLocKind::RegisterKind:
564 // An entry value is a register location -- but with an updated
565 // expression. The register location of such DBG_VALUE is always the
566 // one from the entry DBG_VALUE, it does not matter if the entry value
567 // was copied in to another register due to some optimizations.
568 // Non-entry value register locations are like the source
569 // DBG_VALUE, but with the register number from this VarLoc.
570 MOs.push_back(Elt: MachineOperand::CreateReg(
571 Reg: EVKind == EntryValueLocKind::EntryValueKind ? Orig.getReg()
572 : Register(Loc.RegNo),
573 isDef: false));
574 break;
575 case MachineLocKind::SpillLocKind: {
        // Spills are indirect DBG_VALUEs, with a base register and offset.
        // Use the original DBG_VALUE's expression to build the spilled
        // location on top of. FIXME: spill locations created before this pass
        // runs are not recognized, and not handled here.
580 unsigned Base = Loc.SpillLocation.SpillBase;
581 auto *TRI = MF.getSubtarget().getRegisterInfo();
582 if (MI.isNonListDebugValue()) {
583 auto Deref = Indirect ? DIExpression::DerefAfter : 0;
584 DIExpr = TRI->prependOffsetExpression(
585 Expr: DIExpr, PrependFlags: DIExpression::ApplyOffset | Deref,
586 Offset: Loc.SpillLocation.SpillOffset);
587 Indirect = true;
588 } else {
589 SmallVector<uint64_t, 4> Ops;
590 TRI->getOffsetOpcodes(Offset: Loc.SpillLocation.SpillOffset, Ops);
591 Ops.push_back(Elt: dwarf::DW_OP_deref);
592 DIExpr = DIExpression::appendOpsToArg(Expr: DIExpr, Ops, ArgNo: I);
593 }
594 MOs.push_back(Elt: MachineOperand::CreateReg(Reg: Base, isDef: false));
595 break;
596 }
597 case MachineLocKind::ImmediateKind: {
598 MOs.push_back(Elt: Orig);
599 break;
600 }
601 case MachineLocKind::WasmLocKind: {
602 MOs.push_back(Elt: Orig);
603 break;
604 }
605 case MachineLocKind::InvalidKind:
606 llvm_unreachable("Tried to produce DBG_VALUE for invalid VarLoc");
607 }
608 }
609 return BuildMI(MF, DL: DbgLoc, MCID: IID, IsIndirect: Indirect, MOs, Variable: Var, Expr: DIExpr);
610 }
611
612 /// Is the Loc field a constant or constant object?
613 bool isConstant(MachineLocKind Kind) const {
614 return Kind == MachineLocKind::ImmediateKind;
615 }
616
617 /// Check if the Loc field is an entry backup location.
618 bool isEntryBackupLoc() const {
619 return EVKind == EntryValueLocKind::EntryValueBackupKind ||
620 EVKind == EntryValueLocKind::EntryValueCopyBackupKind;
621 }
622
623 /// If this variable is described by register \p Reg holding the entry
624 /// value, return true.
625 bool isEntryValueBackupReg(Register Reg) const {
626 return EVKind == EntryValueLocKind::EntryValueBackupKind && usesReg(Reg);
627 }
628
629 /// If this variable is described by register \p Reg holding a copy of the
630 /// entry value, return true.
631 bool isEntryValueCopyBackupReg(Register Reg) const {
632 return EVKind == EntryValueLocKind::EntryValueCopyBackupKind &&
633 usesReg(Reg);
634 }
635
636 /// If this variable is described in whole or part by \p Reg, return true.
637 bool usesReg(Register Reg) const {
638 MachineLoc RegML;
639 RegML.Kind = MachineLocKind::RegisterKind;
640 RegML.Value.RegNo = Reg;
641 return is_contained(Range: Locs, Element: RegML);
642 }
643
    /// Return the index of the MachineLoc in Locs that describes this
    /// variable via register \p Reg. \p Reg must be one of this VarLoc's
    /// locations.
    unsigned getRegIdx(Register Reg) const {
646 for (unsigned Idx = 0; Idx < Locs.size(); ++Idx)
647 if (Locs[Idx].Kind == MachineLocKind::RegisterKind &&
648 Register{static_cast<unsigned>(Locs[Idx].Value.RegNo)} == Reg)
649 return Idx;
650 llvm_unreachable("Could not find given Reg in Locs");
651 }
652
653 /// If this variable is described in whole or part by 1 or more registers,
654 /// add each of them to \p Regs and return true.
655 bool getDescribingRegs(SmallVectorImpl<uint32_t> &Regs) const {
656 bool AnyRegs = false;
657 for (const auto &Loc : Locs)
658 if (Loc.Kind == MachineLocKind::RegisterKind) {
659 Regs.push_back(Elt: Loc.Value.RegNo);
660 AnyRegs = true;
661 }
662 return AnyRegs;
663 }
664
665 bool containsSpillLocs() const {
666 return any_of(Range: Locs, P: [](VarLoc::MachineLoc ML) {
667 return ML.Kind == VarLoc::MachineLocKind::SpillLocKind;
668 });
669 }
670
671 /// If this variable is described in whole or part by \p SpillLocation,
672 /// return true.
673 bool usesSpillLoc(SpillLoc SpillLocation) const {
674 MachineLoc SpillML;
675 SpillML.Kind = MachineLocKind::SpillLocKind;
676 SpillML.Value.SpillLocation = SpillLocation;
677 return is_contained(Range: Locs, Element: SpillML);
678 }
679
    /// If this variable is described in whole or part by \p SpillLocation,
    /// return the index of that location in Locs.
682 unsigned getSpillLocIdx(SpillLoc SpillLocation) const {
683 for (unsigned Idx = 0; Idx < Locs.size(); ++Idx)
684 if (Locs[Idx].Kind == MachineLocKind::SpillLocKind &&
685 Locs[Idx].Value.SpillLocation == SpillLocation)
686 return Idx;
687 llvm_unreachable("Could not find given SpillLoc in Locs");
688 }
689
690 bool containsWasmLocs() const {
691 return any_of(Range: Locs, P: [](VarLoc::MachineLoc ML) {
692 return ML.Kind == VarLoc::MachineLocKind::WasmLocKind;
693 });
694 }
695
696 /// If this variable is described in whole or part by \p WasmLocation,
697 /// return true.
698 bool usesWasmLoc(WasmLoc WasmLocation) const {
699 MachineLoc WasmML;
700 WasmML.Kind = MachineLocKind::WasmLocKind;
701 WasmML.Value.WasmLocation = WasmLocation;
702 return is_contained(Range: Locs, Element: WasmML);
703 }
704
705 /// Determine whether the lexical scope of this value's debug location
706 /// dominates MBB.
707 bool dominates(LexicalScopes &LS, MachineBasicBlock &MBB) const {
708 return LS.dominates(DL: MI.getDebugLoc().get(), MBB: &MBB);
709 }
710
711#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
712 // TRI and TII can be null.
713 void dump(const TargetRegisterInfo *TRI, const TargetInstrInfo *TII,
714 raw_ostream &Out = dbgs()) const {
715 Out << "VarLoc(";
716 for (const MachineLoc &MLoc : Locs) {
717 if (Locs.begin() != &MLoc)
718 Out << ", ";
719 switch (MLoc.Kind) {
720 case MachineLocKind::RegisterKind:
721 Out << printReg(Reg: MLoc.Value.RegNo, TRI);
722 break;
723 case MachineLocKind::SpillLocKind:
724 Out << printReg(Reg: MLoc.Value.SpillLocation.SpillBase, TRI);
725 Out << "[" << MLoc.Value.SpillLocation.SpillOffset.getFixed() << " + "
726 << MLoc.Value.SpillLocation.SpillOffset.getScalable()
727 << "x vscale"
728 << "]";
729 break;
730 case MachineLocKind::ImmediateKind:
731 Out << MLoc.Value.Immediate;
732 break;
733 case MachineLocKind::WasmLocKind: {
734 if (TII) {
735 auto Indices = TII->getSerializableTargetIndices();
736 auto Found =
737 find_if(Range&: Indices, P: [&](const std::pair<int, const char *> &I) {
738 return I.first == MLoc.Value.WasmLocation.Index;
739 });
740 assert(Found != Indices.end());
741 Out << Found->second;
742 if (MLoc.Value.WasmLocation.Offset > 0)
743 Out << " + " << MLoc.Value.WasmLocation.Offset;
744 } else {
745 Out << "WasmLoc";
746 }
747 break;
748 }
749 case MachineLocKind::InvalidKind:
750 llvm_unreachable("Invalid VarLoc in dump method");
751 }
752 }
753
754 Out << ", \"" << Var.getVariable()->getName() << "\", " << *Expr << ", ";
755 if (Var.getInlinedAt())
756 Out << "!" << Var.getInlinedAt()->getMetadataID() << ")\n";
757 else
758 Out << "(null))";
759
760 if (isEntryBackupLoc())
761 Out << " (backup loc)\n";
762 else
763 Out << "\n";
764 }
765#endif
766
767 bool operator==(const VarLoc &Other) const {
768 return std::tie(args: EVKind, args: Var, args: Expr, args: Locs) ==
769 std::tie(args: Other.EVKind, args: Other.Var, args: Other.Expr, args: Other.Locs);
770 }
771
772 /// This operator guarantees that VarLocs are sorted by Variable first.
773 bool operator<(const VarLoc &Other) const {
774 return std::tie(args: Var, args: EVKind, args: Locs, args: Expr) <
775 std::tie(args: Other.Var, args: Other.EVKind, args: Other.Locs, args: Other.Expr);
776 }
777 };
778
779#ifndef NDEBUG
780 using VarVec = SmallVector<VarLoc, 32>;
781#endif
782
783 /// VarLocMap is used for two things:
784 /// 1) Assigning LocIndices to a VarLoc. The LocIndices can be used to
785 /// virtually insert a VarLoc into a VarLocSet.
786 /// 2) Given a LocIndex, look up the unique associated VarLoc.
787 class VarLocMap {
788 /// Map a VarLoc to an index within the vector reserved for its location
789 /// within Loc2Vars.
790 std::map<VarLoc, LocIndices> Var2Indices;
791
792 /// Map a location to a vector which holds VarLocs which live in that
793 /// location.
794 SmallDenseMap<LocIndex::u32_location_t, std::vector<VarLoc>> Loc2Vars;
795
796 public:
797 /// Retrieve LocIndices for \p VL.
798 LocIndices insert(const VarLoc &VL) {
799 LocIndices &Indices = Var2Indices[VL];
800 // If Indices is not empty, VL is already in the map.
801 if (!Indices.empty())
802 return Indices;
803 SmallVector<LocIndex::u32_location_t, 4> Locations;
804 // LocIndices are determined by EVKind and MLs; each Register has a
805 // unique location, while all SpillLocs use a single bucket, and any EV
806 // VarLocs use only the Backup bucket or none at all (except the
807 // compulsory entry at the universal location index). LocIndices will
808 // always have an index at the universal location index as the last index.
809 if (VL.EVKind == VarLoc::EntryValueLocKind::NonEntryValueKind) {
810 VL.getDescribingRegs(Regs&: Locations);
811 assert(all_of(Locations,
812 [](auto RegNo) {
813 return RegNo < LocIndex::kFirstInvalidRegLocation;
814 }) &&
815 "Physreg out of range?");
816 if (VL.containsSpillLocs())
817 Locations.push_back(Elt: LocIndex::kSpillLocation);
818 if (VL.containsWasmLocs())
819 Locations.push_back(Elt: LocIndex::kWasmLocation);
820 } else if (VL.EVKind != VarLoc::EntryValueLocKind::EntryValueKind) {
821 LocIndex::u32_location_t Loc = LocIndex::kEntryValueBackupLocation;
822 Locations.push_back(Elt: Loc);
823 }
824 Locations.push_back(Elt: LocIndex::kUniversalLocation);
825 for (LocIndex::u32_location_t Location : Locations) {
826 auto &Vars = Loc2Vars[Location];
827 Indices.push_back(
828 Elt: {Location, static_cast<LocIndex::u32_index_t>(Vars.size())});
829 Vars.push_back(x: VL);
830 }
831 return Indices;
832 }
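
    // An illustrative example of the bucketing above (the register and offset
    // are hypothetical): inserting a DBG_VALUE_LIST VarLoc whose Locs are
    // {RegisterKind $rdi, SpillLocKind {$rbp, -8}} produces three LocIndices:
    // one in $rdi's register bucket, one in kSpillLocation, and one in
    // kUniversalLocation, which is always the last entry.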
833
834 LocIndices getAllIndices(const VarLoc &VL) const {
835 auto IndIt = Var2Indices.find(x: VL);
836 assert(IndIt != Var2Indices.end() && "VarLoc not tracked");
837 return IndIt->second;
838 }
839
840 /// Retrieve the unique VarLoc associated with \p ID.
841 const VarLoc &operator[](LocIndex ID) const {
842 auto LocIt = Loc2Vars.find(Val: ID.Location);
843 assert(LocIt != Loc2Vars.end() && "Location not tracked");
844 return LocIt->second[ID.Index];
845 }
846 };
847
848 using VarLocInMBB =
849 SmallDenseMap<const MachineBasicBlock *, std::unique_ptr<VarLocSet>>;
850 struct TransferDebugPair {
851 MachineInstr *TransferInst; ///< Instruction where this transfer occurs.
852 LocIndex LocationID; ///< Location number for the transfer dest.
853 };
854 using TransferMap = SmallVector<TransferDebugPair, 4>;
  // Types for recording Entry Var Locations emitted by a single MachineInstr,
  // as well as for recording the MachineInstr which last defined a register.
857 using InstToEntryLocMap = std::multimap<const MachineInstr *, LocIndex>;
858 using RegDefToInstMap = DenseMap<Register, MachineInstr *>;
859
860 // Types for recording sets of variable fragments that overlap. For a given
861 // local variable, we record all other fragments of that variable that could
862 // overlap it, to reduce search time.
863 using FragmentOfVar =
864 std::pair<const DILocalVariable *, DIExpression::FragmentInfo>;
865 using OverlapMap =
866 DenseMap<FragmentOfVar, SmallVector<DIExpression::FragmentInfo, 1>>;
867
868 // Helper while building OverlapMap, a map of all fragments seen for a given
869 // DILocalVariable.
870 using VarToFragments =
871 DenseMap<const DILocalVariable *, SmallSet<FragmentInfo, 4>>;
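
  // An illustrative example (the fragment sizes are hypothetical): for one
  // DILocalVariable, fragment (offset 0, size 32) and fragment (offset 16,
  // size 32) share bits 16-31, so OverlapMap records each as overlapping the
  // other. A DBG_VALUE for either fragment must then also close any open
  // range of the other, without rescanning every fragment seen so far.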
872
873 /// Collects all VarLocs from \p CollectFrom. Each unique VarLoc is added
874 /// to \p Collected once, in order of insertion into \p VarLocIDs.
875 static void collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected,
876 const VarLocSet &CollectFrom,
877 const VarLocMap &VarLocIDs);
878
879 /// Get the registers which are used by VarLocs of kind RegisterKind tracked
880 /// by \p CollectFrom.
881 void getUsedRegs(const VarLocSet &CollectFrom,
882 SmallVectorImpl<Register> &UsedRegs) const;
883
884 /// This holds the working set of currently open ranges. For fast
885 /// access, this is done both as a set of VarLocIDs, and a map of
886 /// DebugVariable to recent VarLocID. Note that a DBG_VALUE ends all
887 /// previous open ranges for the same variable. In addition, we keep
  /// two different maps (Vars/EntryValuesBackupVars), so the erase/insert
  /// methods act differently depending on whether a VarLoc is a primary
  /// location or a backup one. If the VarLoc is a backup location we
  /// erase/insert from the EntryValuesBackupVars map, otherwise we perform
  /// the operation on the Vars map.
893 class OpenRangesSet {
894 VarLocSet::Allocator &Alloc;
895 VarLocSet VarLocs;
896 // Map the DebugVariable to recent primary location ID.
897 SmallDenseMap<DebugVariable, LocIndices, 8> Vars;
898 // Map the DebugVariable to recent backup location ID.
899 SmallDenseMap<DebugVariable, LocIndices, 8> EntryValuesBackupVars;
900 OverlapMap &OverlappingFragments;
901
902 public:
903 OpenRangesSet(VarLocSet::Allocator &Alloc, OverlapMap &_OLapMap)
904 : Alloc(Alloc), VarLocs(Alloc), OverlappingFragments(_OLapMap) {}
905
906 const VarLocSet &getVarLocs() const { return VarLocs; }
907
908 // Fetches all VarLocs in \p VarLocIDs and inserts them into \p Collected.
909 // This method is needed to get every VarLoc once, as each VarLoc may have
910 // multiple indices in a VarLocMap (corresponding to each applicable
911 // location), but all VarLocs appear exactly once at the universal location
912 // index.
913 void getUniqueVarLocs(SmallVectorImpl<VarLoc> &Collected,
914 const VarLocMap &VarLocIDs) const {
915 collectAllVarLocs(Collected, CollectFrom: VarLocs, VarLocIDs);
916 }
917
918 /// Terminate all open ranges for VL.Var by removing it from the set.
919 void erase(const VarLoc &VL);
920
921 /// Terminate all open ranges listed as indices in \c KillSet with
922 /// \c Location by removing them from the set.
923 void erase(const VarLocsInRange &KillSet, const VarLocMap &VarLocIDs,
924 LocIndex::u32_location_t Location);
925
926 /// Insert a new range into the set.
927 void insert(LocIndices VarLocIDs, const VarLoc &VL);
928
929 /// Insert a set of ranges.
930 void insertFromLocSet(const VarLocSet &ToLoad, const VarLocMap &Map);
931
932 std::optional<LocIndices> getEntryValueBackup(DebugVariable Var);
933
934 /// Empty the set.
935 void clear() {
936 VarLocs.clear();
937 Vars.clear();
938 EntryValuesBackupVars.clear();
939 }
940
941 /// Return whether the set is empty or not.
942 bool empty() const {
943 assert(Vars.empty() == EntryValuesBackupVars.empty() &&
944 Vars.empty() == VarLocs.empty() &&
945 "open ranges are inconsistent");
946 return VarLocs.empty();
947 }
948
949 /// Get an empty range of VarLoc IDs.
950 auto getEmptyVarLocRange() const {
951 return iterator_range<VarLocSet::const_iterator>(getVarLocs().end(),
952 getVarLocs().end());
953 }
954
955 /// Get all set IDs for VarLocs with MLs of kind RegisterKind in \p Reg.
956 auto getRegisterVarLocs(Register Reg) const {
957 return LocIndex::indexRangeForLocation(Set: getVarLocs(), Location: Reg);
958 }
959
960 /// Get all set IDs for VarLocs with MLs of kind SpillLocKind.
961 auto getSpillVarLocs() const {
962 return LocIndex::indexRangeForLocation(Set: getVarLocs(),
963 Location: LocIndex::kSpillLocation);
964 }
965
966 /// Get all set IDs for VarLocs of EVKind EntryValueBackupKind or
967 /// EntryValueCopyBackupKind.
968 auto getEntryValueBackupVarLocs() const {
969 return LocIndex::indexRangeForLocation(
970 Set: getVarLocs(), Location: LocIndex::kEntryValueBackupLocation);
971 }
972
973 /// Get all set IDs for VarLocs with MLs of kind WasmLocKind.
974 auto getWasmVarLocs() const {
975 return LocIndex::indexRangeForLocation(Set: getVarLocs(),
976 Location: LocIndex::kWasmLocation);
977 }
978 };
979
980 /// Collect all VarLoc IDs from \p CollectFrom for VarLocs with MLs of kind
981 /// RegisterKind which are located in any reg in \p Regs. The IDs for each
982 /// VarLoc correspond to entries in the universal location bucket, which every
983 /// VarLoc has exactly 1 entry for. Insert collected IDs into \p Collected.
984 static void collectIDsForRegs(VarLocsInRange &Collected,
985 const DefinedRegsSet &Regs,
986 const VarLocSet &CollectFrom,
987 const VarLocMap &VarLocIDs);
988
989 VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB, VarLocInMBB &Locs) {
990 std::unique_ptr<VarLocSet> &VLS = Locs[MBB];
991 if (!VLS)
992 VLS = std::make_unique<VarLocSet>(args&: Alloc);
993 return *VLS;
994 }
995
996 const VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB,
997 const VarLocInMBB &Locs) const {
998 auto It = Locs.find(Val: MBB);
999 assert(It != Locs.end() && "MBB not in map");
1000 return *It->second;
1001 }
1002
1003 /// Tests whether this instruction is a spill to a stack location.
1004 bool isSpillInstruction(const MachineInstr &MI, MachineFunction *MF);
1005
1006 /// Decide if @MI is a spill instruction and return true if it is. We use 2
1007 /// criteria to make this decision:
1008 /// - Is this instruction a store to a spill slot?
1009 /// - Is there a register operand that is both used and killed?
1010 /// TODO: Store optimization can fold spills into other stores (including
1011 /// other spills). We do not handle this yet (more than one memory operand).
1012 bool isLocationSpill(const MachineInstr &MI, MachineFunction *MF,
1013 Register &Reg);
1014
1015 /// Returns true if the given machine instruction is a debug value which we
1016 /// can emit entry values for.
1017 ///
1018 /// Currently, we generate debug entry values only for parameters that are
1019 /// unmodified throughout the function and located in a register.
1020 bool isEntryValueCandidate(const MachineInstr &MI,
1021 const DefinedRegsSet &Regs) const;
1022
  /// If a given instruction is identified as a restore from a spill slot,
  /// return the spill location being restored from and set \p Reg to the
  /// restored register.
1025 std::optional<VarLoc::SpillLoc> isRestoreInstruction(const MachineInstr &MI,
1026 MachineFunction *MF,
1027 Register &Reg);
1028 /// Given a spill instruction, extract the register and offset used to
1029 /// address the spill location in a target independent way.
1030 VarLoc::SpillLoc extractSpillBaseRegAndOffset(const MachineInstr &MI);
1031 void insertTransferDebugPair(MachineInstr &MI, OpenRangesSet &OpenRanges,
1032 TransferMap &Transfers, VarLocMap &VarLocIDs,
1033 LocIndex OldVarID, TransferKind Kind,
1034 const VarLoc::MachineLoc &OldLoc,
1035 Register NewReg = Register());
1036
1037 void transferDebugValue(const MachineInstr &MI, OpenRangesSet &OpenRanges,
1038 VarLocMap &VarLocIDs,
1039 InstToEntryLocMap &EntryValTransfers,
1040 RegDefToInstMap &RegSetInstrs);
1041 void transferSpillOrRestoreInst(MachineInstr &MI, OpenRangesSet &OpenRanges,
1042 VarLocMap &VarLocIDs, TransferMap &Transfers);
1043 void cleanupEntryValueTransfers(const MachineInstr *MI,
1044 OpenRangesSet &OpenRanges,
1045 VarLocMap &VarLocIDs, const VarLoc &EntryVL,
1046 InstToEntryLocMap &EntryValTransfers);
1047 void removeEntryValue(const MachineInstr &MI, OpenRangesSet &OpenRanges,
1048 VarLocMap &VarLocIDs, const VarLoc &EntryVL,
1049 InstToEntryLocMap &EntryValTransfers,
1050 RegDefToInstMap &RegSetInstrs);
1051 void emitEntryValues(MachineInstr &MI, OpenRangesSet &OpenRanges,
1052 VarLocMap &VarLocIDs,
1053 InstToEntryLocMap &EntryValTransfers,
1054 VarLocsInRange &KillSet);
1055 void recordEntryValue(const MachineInstr &MI,
1056 const DefinedRegsSet &DefinedRegs,
1057 OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs);
1058 void transferRegisterCopy(MachineInstr &MI, OpenRangesSet &OpenRanges,
1059 VarLocMap &VarLocIDs, TransferMap &Transfers);
1060 void transferRegisterDef(MachineInstr &MI, OpenRangesSet &OpenRanges,
1061 VarLocMap &VarLocIDs,
1062 InstToEntryLocMap &EntryValTransfers,
1063 RegDefToInstMap &RegSetInstrs);
1064 void transferWasmDef(MachineInstr &MI, OpenRangesSet &OpenRanges,
1065 VarLocMap &VarLocIDs);
1066 bool transferTerminator(MachineBasicBlock *MBB, OpenRangesSet &OpenRanges,
1067 VarLocInMBB &OutLocs, const VarLocMap &VarLocIDs);
1068
1069 void process(MachineInstr &MI, OpenRangesSet &OpenRanges,
1070 VarLocMap &VarLocIDs, TransferMap &Transfers,
1071 InstToEntryLocMap &EntryValTransfers,
1072 RegDefToInstMap &RegSetInstrs);
1073
1074 void accumulateFragmentMap(MachineInstr &MI, VarToFragments &SeenFragments,
1075 OverlapMap &OLapMap);
1076
1077 bool join(MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs,
1078 const VarLocMap &VarLocIDs,
1079 SmallPtrSet<const MachineBasicBlock *, 16> &Visited,
1080 SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks);
1081
1082 /// Create DBG_VALUE insts for inlocs that have been propagated but
1083 /// had their instruction creation deferred.
1084 void flushPendingLocs(VarLocInMBB &PendingInLocs, VarLocMap &VarLocIDs);
1085
1086 bool ExtendRanges(MachineFunction &MF, MachineDominatorTree *DomTree,
1087 TargetPassConfig *TPC, unsigned InputBBLimit,
1088 unsigned InputDbgValLimit) override;
1089
1090public:
1091 /// Default construct and initialize the pass.
1092 VarLocBasedLDV();
1093
1094 ~VarLocBasedLDV();
1095
1096 /// Print to ostream with a message.
1097 void printVarLocInMBB(const MachineFunction &MF, const VarLocInMBB &V,
1098 const VarLocMap &VarLocIDs, const char *msg,
1099 raw_ostream &Out) const;
1100};
1101
1102} // end anonymous namespace
1103
1104//===----------------------------------------------------------------------===//
1105// Implementation
1106//===----------------------------------------------------------------------===//
1107
1108VarLocBasedLDV::VarLocBasedLDV() = default;
1109
1110VarLocBasedLDV::~VarLocBasedLDV() = default;
1111
1112/// Erase a variable from the set of open ranges, and additionally erase any
1113/// fragments that may overlap it. If the VarLoc is a backup location, erase
1114/// the variable from the EntryValuesBackupVars set, indicating we should stop
/// tracking its backup entry location. Otherwise, if the VarLoc is a primary
/// location, erase the variable from the Vars set.
1117void VarLocBasedLDV::OpenRangesSet::erase(const VarLoc &VL) {
1118 // Erasure helper.
1119 auto DoErase = [&VL, this](DebugVariable VarToErase) {
1120 auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
1121 auto It = EraseFrom->find(Val: VarToErase);
1122 if (It != EraseFrom->end()) {
1123 LocIndices IDs = It->second;
1124 for (LocIndex ID : IDs)
1125 VarLocs.reset(Index: ID.getAsRawInteger());
1126 EraseFrom->erase(I: It);
1127 }
1128 };
1129
1130 DebugVariable Var = VL.Var;
1131
1132 // Erase the variable/fragment that ends here.
1133 DoErase(Var);
1134
1135 // Extract the fragment. Interpret an empty fragment as one that covers all
1136 // possible bits.
1137 FragmentInfo ThisFragment = Var.getFragmentOrDefault();
1138
1139 // There may be fragments that overlap the designated fragment. Look them up
1140 // in the pre-computed overlap map, and erase them too.
1141 auto MapIt = OverlappingFragments.find(Val: {Var.getVariable(), ThisFragment});
1142 if (MapIt != OverlappingFragments.end()) {
1143 for (auto Fragment : MapIt->second) {
1144 VarLocBasedLDV::OptFragmentInfo FragmentHolder;
1145 if (!DebugVariable::isDefaultFragment(F: Fragment))
1146 FragmentHolder = VarLocBasedLDV::OptFragmentInfo(Fragment);
1147 DoErase({Var.getVariable(), FragmentHolder, Var.getInlinedAt()});
1148 }
1149 }
1150}
1151
1152void VarLocBasedLDV::OpenRangesSet::erase(const VarLocsInRange &KillSet,
1153 const VarLocMap &VarLocIDs,
1154 LocIndex::u32_location_t Location) {
1155 VarLocSet RemoveSet(Alloc);
1156 for (LocIndex::u32_index_t ID : KillSet) {
1157 const VarLoc &VL = VarLocIDs[LocIndex(Location, ID)];
1158 auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
1159 EraseFrom->erase(Val: VL.Var);
1160 LocIndices VLI = VarLocIDs.getAllIndices(VL);
1161 for (LocIndex ID : VLI)
1162 RemoveSet.set(ID.getAsRawInteger());
1163 }
1164 VarLocs.intersectWithComplement(Other: RemoveSet);
1165}
1166
1167void VarLocBasedLDV::OpenRangesSet::insertFromLocSet(const VarLocSet &ToLoad,
1168 const VarLocMap &Map) {
1169 VarLocsInRange UniqueVarLocIDs;
1170 DefinedRegsSet Regs;
1171 Regs.insert(V: LocIndex::kUniversalLocation);
1172 collectIDsForRegs(Collected&: UniqueVarLocIDs, Regs, CollectFrom: ToLoad, VarLocIDs: Map);
1173 for (uint64_t ID : UniqueVarLocIDs) {
1174 LocIndex Idx = LocIndex::fromRawInteger(ID);
1175 const VarLoc &VarL = Map[Idx];
1176 const LocIndices Indices = Map.getAllIndices(VL: VarL);
1177 insert(VarLocIDs: Indices, VL: VarL);
1178 }
1179}
1180
1181void VarLocBasedLDV::OpenRangesSet::insert(LocIndices VarLocIDs,
1182 const VarLoc &VL) {
1183 auto *InsertInto = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
1184 for (LocIndex ID : VarLocIDs)
1185 VarLocs.set(ID.getAsRawInteger());
1186 InsertInto->insert(KV: {VL.Var, VarLocIDs});
1187}
1188
1189/// Return the Loc ID of an entry value backup location, if it exists for the
1190/// variable.
1191std::optional<LocIndices>
1192VarLocBasedLDV::OpenRangesSet::getEntryValueBackup(DebugVariable Var) {
1193 auto It = EntryValuesBackupVars.find(Val: Var);
1194 if (It != EntryValuesBackupVars.end())
1195 return It->second;
1196
1197 return std::nullopt;
1198}
1199
1200void VarLocBasedLDV::collectIDsForRegs(VarLocsInRange &Collected,
1201 const DefinedRegsSet &Regs,
1202 const VarLocSet &CollectFrom,
1203 const VarLocMap &VarLocIDs) {
1204 assert(!Regs.empty() && "Nothing to collect");
1205 SmallVector<Register, 32> SortedRegs;
1206 append_range(C&: SortedRegs, R: Regs);
1207 array_pod_sort(Start: SortedRegs.begin(), End: SortedRegs.end());
1208 auto It = CollectFrom.find(Index: LocIndex::rawIndexForReg(Reg: SortedRegs.front()));
1209 auto End = CollectFrom.end();
1210 for (Register Reg : SortedRegs) {
1211 // The half-open interval [FirstIndexForReg, FirstInvalidIndex) contains
1212 // all possible VarLoc IDs for VarLocs with MLs of kind RegisterKind which
1213 // live in Reg.
1214 uint64_t FirstIndexForReg = LocIndex::rawIndexForReg(Reg);
1215 uint64_t FirstInvalidIndex = LocIndex::rawIndexForReg(Reg: Reg + 1);
1216 It.advanceToLowerBound(Index: FirstIndexForReg);
1217
1218 // Iterate through that half-open interval and collect all the set IDs.
1219 for (; It != End && *It < FirstInvalidIndex; ++It) {
1220 LocIndex ItIdx = LocIndex::fromRawInteger(ID: *It);
1221 const VarLoc &VL = VarLocIDs[ItIdx];
1222 LocIndices LI = VarLocIDs.getAllIndices(VL);
1223 // For now, the back index is always the universal location index.
1224 assert(LI.back().Location == LocIndex::kUniversalLocation &&
1225 "Unexpected order of LocIndices for VarLoc; was it inserted into "
1226 "the VarLocMap correctly?");
1227 Collected.insert(V: LI.back().Index);
1228 }
1229
1230 if (It == End)
1231 return;
1232 }
1233}
1234
1235void VarLocBasedLDV::getUsedRegs(const VarLocSet &CollectFrom,
1236 SmallVectorImpl<Register> &UsedRegs) const {
1237 // All register-based VarLocs are assigned indices greater than or equal to
1238 // FirstRegIndex.
1239 uint64_t FirstRegIndex =
1240 LocIndex::rawIndexForReg(Reg: LocIndex::kFirstRegLocation);
1241 uint64_t FirstInvalidIndex =
1242 LocIndex::rawIndexForReg(Reg: LocIndex::kFirstInvalidRegLocation);
1243 for (auto It = CollectFrom.find(Index: FirstRegIndex),
1244 End = CollectFrom.find(Index: FirstInvalidIndex);
1245 It != End;) {
1246 // We found a VarLoc ID for a VarLoc that lives in a register. Figure out
1247 // which register and add it to UsedRegs.
1248 uint32_t FoundReg = LocIndex::fromRawInteger(ID: *It).Location;
1249 assert((UsedRegs.empty() || FoundReg != UsedRegs.back()) &&
1250 "Duplicate used reg");
1251 UsedRegs.push_back(Elt: FoundReg);
1252
1253 // Skip to the next /set/ register. Note that this finds a lower bound, so
1254 // even if there aren't any VarLocs living in `FoundReg+1`, we're still
1255 // guaranteed to move on to the next register (or to end()).
1256 uint64_t NextRegIndex = LocIndex::rawIndexForReg(Reg: FoundReg + 1);
1257 It.advanceToLowerBound(Index: NextRegIndex);
1258 }
1259}
1260
1261//===----------------------------------------------------------------------===//
1262// Debug Range Extension Implementation
1263//===----------------------------------------------------------------------===//
1264
1265#ifndef NDEBUG
1266void VarLocBasedLDV::printVarLocInMBB(const MachineFunction &MF,
1267 const VarLocInMBB &V,
1268 const VarLocMap &VarLocIDs,
1269 const char *msg,
1270 raw_ostream &Out) const {
1271 Out << '\n' << msg << '\n';
1272 for (const MachineBasicBlock &BB : MF) {
1273 if (!V.count(Val: &BB))
1274 continue;
1275 const VarLocSet &L = getVarLocsInMBB(MBB: &BB, Locs: V);
1276 if (L.empty())
1277 continue;
1278 SmallVector<VarLoc, 32> VarLocs;
1279 collectAllVarLocs(Collected&: VarLocs, CollectFrom: L, VarLocIDs);
1280 Out << "MBB: " << BB.getNumber() << ":\n";
1281 for (const VarLoc &VL : VarLocs) {
1282 Out << " Var: " << VL.Var.getVariable()->getName();
1283 Out << " MI: ";
1284 VL.dump(TRI, TII, Out);
1285 }
1286 }
1287 Out << "\n";
1288}
1289#endif
1290
1291VarLocBasedLDV::VarLoc::SpillLoc
1292VarLocBasedLDV::extractSpillBaseRegAndOffset(const MachineInstr &MI) {
1293 assert(MI.hasOneMemOperand() &&
1294 "Spill instruction does not have exactly one memory operand?");
1295 auto MMOI = MI.memoperands_begin();
1296 const PseudoSourceValue *PVal = (*MMOI)->getPseudoValue();
1297 assert(PVal->kind() == PseudoSourceValue::FixedStack &&
1298 "Inconsistent memory operand in spill instruction");
1299 int FI = cast<FixedStackPseudoSourceValue>(Val: PVal)->getFrameIndex();
1300 const MachineBasicBlock *MBB = MI.getParent();
1301 Register Reg;
1302 StackOffset Offset = TFI->getFrameIndexReference(MF: *MBB->getParent(), FI, FrameReg&: Reg);
1303 return {.SpillBase: Reg, .SpillOffset: Offset};
1304}
1305
1306/// Do cleanup of \p EntryValTransfers created by \p TRInst, by removing the
1307/// Transfer, which uses the to-be-deleted \p EntryVL.
1308void VarLocBasedLDV::cleanupEntryValueTransfers(
1309 const MachineInstr *TRInst, OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs,
1310 const VarLoc &EntryVL, InstToEntryLocMap &EntryValTransfers) {
1311 if (EntryValTransfers.empty() || TRInst == nullptr)
1312 return;
1313
1314 auto TransRange = EntryValTransfers.equal_range(x: TRInst);
1315 for (auto &TDPair : llvm::make_range(x: TransRange.first, y: TransRange.second)) {
1316 const VarLoc &EmittedEV = VarLocIDs[TDPair.second];
1317 if (std::tie(args: EntryVL.Var, args: EntryVL.Locs[0].Value.RegNo, args: EntryVL.Expr) ==
1318 std::tie(args: EmittedEV.Var, args: EmittedEV.Locs[0].Value.RegNo,
1319 args: EmittedEV.Expr)) {
1320 OpenRanges.erase(VL: EmittedEV);
1321 EntryValTransfers.erase(x: TRInst);
1322 break;
1323 }
1324 }
1325}
1326
/// Try to salvage the debug entry value if we encounter a new debug value
/// describing the same parameter, otherwise stop tracking the entry value:
/// clean up the Entry Value Transfers already emitted for it and erase it
/// from the open ranges.
1331void VarLocBasedLDV::removeEntryValue(const MachineInstr &MI,
1332 OpenRangesSet &OpenRanges,
1333 VarLocMap &VarLocIDs,
1334 const VarLoc &EntryVL,
1335 InstToEntryLocMap &EntryValTransfers,
1336 RegDefToInstMap &RegSetInstrs) {
1337 // Skip the DBG_VALUE which is the debug entry value itself.
1338 if (&MI == &EntryVL.MI)
1339 return;
1340
  // If the parameter's location is not a register location, we can not track
  // the entry value any more. There is no TransferInst which defines the
  // register, so no Entry Value Transfers have been emitted yet.
1344 if (!MI.getDebugOperand(Index: 0).isReg())
1345 return;
1346
1347 // Try to get non-debug instruction responsible for the DBG_VALUE.
1348 const MachineInstr *TransferInst = nullptr;
1349 Register Reg = MI.getDebugOperand(Index: 0).getReg();
1350 if (Reg.isValid() && RegSetInstrs.contains(Val: Reg))
1351 TransferInst = RegSetInstrs.find(Val: Reg)->second;
1352
1353 // Case of the parameter's DBG_VALUE at the start of entry MBB.
1354 if (!TransferInst && !LastNonDbgMI && MI.getParent()->isEntryBlock())
1355 return;
1356
1357 // If the debug expression from the DBG_VALUE is not empty, we can assume the
1358 // parameter's value has changed, indicating that we should stop tracking its
1359 // entry value as well.
1360 if (MI.getDebugExpression()->getNumElements() == 0 && TransferInst) {
1361 // If the DBG_VALUE comes from a copy instruction that copies the entry
1362 // value, it means the parameter's value has not changed and we should be
1363 // able to use its entry value.
1364 // TODO: Try to keep tracking an entry value if we encounter a propagated
1365 // DBG_VALUE describing a copy of the entry value. (A propagated entry value
1366 // does not indicate a parameter modification.)
1367 auto DestSrc = TII->isCopyLikeInstr(MI: *TransferInst);
1368 if (DestSrc) {
1369 const MachineOperand *SrcRegOp, *DestRegOp;
1370 SrcRegOp = DestSrc->Source;
1371 DestRegOp = DestSrc->Destination;
1372 if (Reg == DestRegOp->getReg()) {
1373 for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) {
1374 const VarLoc &VL = VarLocIDs[LocIndex::fromRawInteger(ID)];
1375 if (VL.isEntryValueCopyBackupReg(Reg) &&
1376 // Entry Values should not be variadic.
1377 VL.MI.getDebugOperand(Index: 0).getReg() == SrcRegOp->getReg())
1378 return;
1379 }
1380 }
1381 }
1382 }
1383
1384 LLVM_DEBUG(dbgs() << "Deleting a DBG entry value because of: ";
1385 MI.print(dbgs(), /*IsStandalone*/ false,
1386 /*SkipOpers*/ false, /*SkipDebugLoc*/ false,
1387 /*AddNewLine*/ true, TII));
1388 cleanupEntryValueTransfers(TRInst: TransferInst, OpenRanges, VarLocIDs, EntryVL,
1389 EntryValTransfers);
1390 OpenRanges.erase(VL: EntryVL);
1391}
1392
1393/// End all previous ranges related to @MI and start a new range from @MI
1394/// if it is a DBG_VALUE instr.
1395void VarLocBasedLDV::transferDebugValue(const MachineInstr &MI,
1396 OpenRangesSet &OpenRanges,
1397 VarLocMap &VarLocIDs,
1398 InstToEntryLocMap &EntryValTransfers,
1399 RegDefToInstMap &RegSetInstrs) {
1400 if (!MI.isDebugValue())
1401 return;
1402 const DILocalVariable *Var = MI.getDebugVariable();
1403 const DIExpression *Expr = MI.getDebugExpression();
1404 const DILocation *DebugLoc = MI.getDebugLoc();
1405 const DILocation *InlinedAt = DebugLoc->getInlinedAt();
1406 assert(Var->isValidLocationForIntrinsic(DebugLoc) &&
1407 "Expected inlined-at fields to agree");
1408
1409 DebugVariable V(Var, Expr, InlinedAt);
1410
1411 // Check if this DBG_VALUE indicates a parameter's value changing.
1412 // If that is the case, we should stop tracking its entry value.
1413 auto EntryValBackupID = OpenRanges.getEntryValueBackup(Var: V);
1414 if (Var->isParameter() && EntryValBackupID) {
1415 const VarLoc &EntryVL = VarLocIDs[EntryValBackupID->back()];
1416 removeEntryValue(MI, OpenRanges, VarLocIDs, EntryVL, EntryValTransfers,
1417 RegSetInstrs);
1418 }
1419
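  // Classify the DBG_VALUE by its operands: if every operand is a concrete
  // machine location (non-zero register, immediate, or target index) we open
  // a new range for it below; otherwise it must be an undef DBG_VALUE that
  // only closes the variable's current range.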
1420 if (all_of(Range: MI.debug_operands(), P: [](const MachineOperand &MO) {
1421 return (MO.isReg() && MO.getReg()) || MO.isImm() || MO.isFPImm() ||
1422 MO.isCImm() || MO.isTargetIndex();
1423 })) {
1424 // Use normal VarLoc constructor for registers and immediates.
1425 VarLoc VL(MI);
1426 // End all previous ranges of VL.Var.
1427 OpenRanges.erase(VL);
1428
1429 LocIndices IDs = VarLocIDs.insert(VL);
1430 // Add the VarLoc to OpenRanges from this DBG_VALUE.
1431 OpenRanges.insert(VarLocIDs: IDs, VL);
1432 } else if (MI.memoperands().size() > 0) {
1433 llvm_unreachable("DBG_VALUE with mem operand encountered after regalloc?");
1434 } else {
1435 // This must be an undefined location. If it has an open range, erase it.
1436 assert(MI.isUndefDebugValue() &&
1437 "Unexpected non-undef DBG_VALUE encountered");
1438 VarLoc VL(MI);
1439 OpenRanges.erase(VL);
1440 }
1441}
1442
1443// This should be removed later, doesn't fit the new design.
1444void VarLocBasedLDV::collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected,
1445 const VarLocSet &CollectFrom,
1446 const VarLocMap &VarLocIDs) {
1447 // The half-open interval [FirstIndex, FirstInvalidIndex) covers the
1448 // universal location bucket, which contains an entry for every VarLoc ID
1449 // present in CollectFrom.
1450 uint64_t FirstIndex = LocIndex::rawIndexForReg(Reg: LocIndex::kUniversalLocation);
1451 uint64_t FirstInvalidIndex =
1452 LocIndex::rawIndexForReg(Reg: LocIndex::kUniversalLocation + 1);
1453 // Iterate through that half-open interval and collect all the set IDs.
1454 for (auto It = CollectFrom.find(Index: FirstIndex), End = CollectFrom.end();
1455 It != End && *It < FirstInvalidIndex; ++It) {
1456 LocIndex RegIdx = LocIndex::fromRawInteger(ID: *It);
1457 Collected.push_back(Elt: VarLocIDs[RegIdx]);
1458 }
1459}
1460
1461/// Turn the entry value backup locations into primary locations.
1462void VarLocBasedLDV::emitEntryValues(MachineInstr &MI,
1463 OpenRangesSet &OpenRanges,
1464 VarLocMap &VarLocIDs,
1465 InstToEntryLocMap &EntryValTransfers,
1466 VarLocsInRange &KillSet) {
1467 // Do not insert entry value locations after a terminator.
1468 if (MI.isTerminator())
1469 return;
1470
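  // For every location being killed, check whether its variable is a
  // parameter with a recorded entry-value backup; if so, re-open that backup
  // as a primary location and remember which instruction caused the transfer.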
1471 for (uint32_t ID : KillSet) {
1472 // The KillSet IDs are indices for the universal location bucket.
1473 LocIndex Idx = LocIndex(LocIndex::kUniversalLocation, ID);
1474 const VarLoc &VL = VarLocIDs[Idx];
1475 if (!VL.Var.getVariable()->isParameter())
1476 continue;
1477
1478 auto DebugVar = VL.Var;
1479 std::optional<LocIndices> EntryValBackupIDs =
1480 OpenRanges.getEntryValueBackup(Var: DebugVar);
1481
1482 // If the parameter has the entry value backup, it means we should
1483 // be able to use its entry value.
1484 if (!EntryValBackupIDs)
1485 continue;
1486
1487 const VarLoc &EntryVL = VarLocIDs[EntryValBackupIDs->back()];
1488 VarLoc EntryLoc = VarLoc::CreateEntryLoc(MI: EntryVL.MI, EntryExpr: EntryVL.Expr,
1489 Reg: EntryVL.Locs[0].Value.RegNo);
1490 LocIndices EntryValueIDs = VarLocIDs.insert(VL: EntryLoc);
1491 assert(EntryValueIDs.size() == 1 &&
1492 "EntryValue loc should not be variadic");
1493 EntryValTransfers.insert(x: {&MI, EntryValueIDs.back()});
1494 OpenRanges.insert(VarLocIDs: EntryValueIDs, VL: EntryLoc);
1495 }
1496}
1497
1498/// Create a new TransferDebugPair and insert it in \p Transfers. The VarLoc
1499/// with \p OldVarID is deleted from \p OpenRanges and replaced with the new
1500/// VarLoc. If \p NewReg is different from the default zero value, the new
1501/// location is a register location created by the copy-like instruction;
1502/// otherwise it is the variable's location on the stack.
1503void VarLocBasedLDV::insertTransferDebugPair(
1504 MachineInstr &MI, OpenRangesSet &OpenRanges, TransferMap &Transfers,
1505 VarLocMap &VarLocIDs, LocIndex OldVarID, TransferKind Kind,
1506 const VarLoc::MachineLoc &OldLoc, Register NewReg) {
1507 const VarLoc &OldVarLoc = VarLocIDs[OldVarID];
1508
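  // Helper that opens a range for the rebuilt VarLoc and queues a DBG_VALUE
  // (as a pending transfer) to be inserted after MI once the block scan is
  // finished.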
1509 auto ProcessVarLoc = [&MI, &OpenRanges, &Transfers, &VarLocIDs](VarLoc &VL) {
1510 LocIndices LocIds = VarLocIDs.insert(VL);
1511
1512 // Close this variable's previous location range.
1513 OpenRanges.erase(VL);
1514
1515 // Record the new location as an open range, and a postponed transfer
1516 // inserting a DBG_VALUE for this location.
1517 OpenRanges.insert(VarLocIDs: LocIds, VL);
1518 assert(!MI.isTerminator() && "Cannot insert DBG_VALUE after terminator");
1519 TransferDebugPair MIP = {.TransferInst: &MI, .LocationID: LocIds.back()};
1520 Transfers.push_back(Elt: MIP);
1521 };
1522
1523 // End all previous ranges of VL.Var.
1524 OpenRanges.erase(VL: VarLocIDs[OldVarID]);
1525 switch (Kind) {
1526 case TransferKind::TransferCopy: {
1527 assert(NewReg &&
1528 "No register supplied when handling a copy of a debug value");
1529 // Create a DBG_VALUE instruction to describe the Var in its new
1530 // register location.
1531 VarLoc VL = VarLoc::CreateCopyLoc(OldVL: OldVarLoc, OldML: OldLoc, NewReg);
1532 ProcessVarLoc(VL);
1533 LLVM_DEBUG({
1534 dbgs() << "Creating VarLoc for register copy:";
1535 VL.dump(TRI, TII);
1536 });
1537 return;
1538 }
1539 case TransferKind::TransferSpill: {
1540 // Create a DBG_VALUE instruction to describe the Var in its spilled
1541 // location.
1542 VarLoc::SpillLoc SpillLocation = extractSpillBaseRegAndOffset(MI);
1543 VarLoc VL = VarLoc::CreateSpillLoc(
1544 OldVL: OldVarLoc, OldML: OldLoc, SpillBase: SpillLocation.SpillBase, SpillOffset: SpillLocation.SpillOffset);
1545 ProcessVarLoc(VL);
1546 LLVM_DEBUG({
1547 dbgs() << "Creating VarLoc for spill:";
1548 VL.dump(TRI, TII);
1549 });
1550 return;
1551 }
1552 case TransferKind::TransferRestore: {
1553 assert(NewReg &&
1554 "No register supplied when handling a restore of a debug value");
1555 // DebugInstr refers to the pre-spill location, therefore we can reuse
1556 // its expression.
1557 VarLoc VL = VarLoc::CreateCopyLoc(OldVL: OldVarLoc, OldML: OldLoc, NewReg);
1558 ProcessVarLoc(VL);
1559 LLVM_DEBUG({
1560 dbgs() << "Creating VarLoc for restore:";
1561 VL.dump(TRI, TII);
1562 });
1563 return;
1564 }
1565 }
1566 llvm_unreachable("Invalid transfer kind");
1567}
1568
1569/// A definition of a register may mark the end of a range.
1570void VarLocBasedLDV::transferRegisterDef(MachineInstr &MI,
1571 OpenRangesSet &OpenRanges,
1572 VarLocMap &VarLocIDs,
1573 InstToEntryLocMap &EntryValTransfers,
1574 RegDefToInstMap &RegSetInstrs) {
1575
1576 // Meta Instructions do not affect the debug liveness of any register they
1577 // define.
1578 if (MI.isMetaInstruction())
1579 return;
1580
1581 MachineFunction *MF = MI.getMF();
1582 const TargetLowering *TLI = MF->getSubtarget().getTargetLowering();
1583 Register SP = TLI->getStackPointerRegisterToSaveRestore();
1584
1585 // Find the regs killed by MI, and find regmasks of preserved regs.
1586 DefinedRegsSet DeadRegs;
1587 SmallVector<const uint32_t *, 4> RegMasks;
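  // Note: a def of SP by a call instruction is deliberately not treated as a
  // kill below; the regmask handling later in this function makes the same
  // exception for SP.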
1588 for (const MachineOperand &MO : MI.operands()) {
1589 // Determine whether the operand is a register def.
1590 if (MO.isReg() && MO.isDef() && MO.getReg() && MO.getReg().isPhysical() &&
1591 !(MI.isCall() && MO.getReg() == SP)) {
1592 // Remove ranges of all aliased registers.
1593 for (MCRegAliasIterator RAI(MO.getReg(), TRI, true); RAI.isValid(); ++RAI)
1594 // FIXME: Can we break out of this loop early if no insertion occurs?
1595 DeadRegs.insert(V: *RAI);
1596 RegSetInstrs.erase(Val: MO.getReg());
1597 RegSetInstrs.insert(KV: {MO.getReg(), &MI});
1598 } else if (MO.isRegMask()) {
1599 RegMasks.push_back(Elt: MO.getRegMask());
1600 }
1601 }
1602
1603 // Erase VarLocs which reside in one of the dead registers. For performance
1604 // reasons, it's critical to not iterate over the full set of open VarLocs.
1605 // Iterate over the set of dying/used regs instead.
1606 if (!RegMasks.empty()) {
1607 SmallVector<Register, 32> UsedRegs;
1608 getUsedRegs(CollectFrom: OpenRanges.getVarLocs(), UsedRegs);
1609 for (Register Reg : UsedRegs) {
1610 // Remove ranges of all clobbered registers. Register masks don't usually
1611 // list SP as preserved. Assume that call instructions never clobber SP,
1612 // because some backends (e.g., AArch64) never list SP in the regmask.
1613 // While the debug info may be off for an instruction or two around
1614 // callee-cleanup calls, transferring the DEBUG_VALUE across the call is
1615 // still a better user experience.
1616 if (Reg == SP)
1617 continue;
1618 bool AnyRegMaskKillsReg =
1619 any_of(Range&: RegMasks, P: [Reg](const uint32_t *RegMask) {
1620 return MachineOperand::clobbersPhysReg(RegMask, PhysReg: Reg);
1621 });
1622 if (AnyRegMaskKillsReg) {
1623 DeadRegs.insert(V: Reg);
1624 RegSetInstrs.erase(Val: Reg);
1625 RegSetInstrs.insert(KV: {Reg, &MI});
1626 }
1628 }
1629 }
1630
1631 if (DeadRegs.empty())
1632 return;
1633
1634 VarLocsInRange KillSet;
1635 collectIDsForRegs(Collected&: KillSet, Regs: DeadRegs, CollectFrom: OpenRanges.getVarLocs(), VarLocIDs);
1636 OpenRanges.erase(KillSet, VarLocIDs, Location: LocIndex::kUniversalLocation);
1637
1638 if (TPC) {
1639 auto &TM = TPC->getTM<TargetMachine>();
1640 if (TM.Options.ShouldEmitDebugEntryValues())
1641 emitEntryValues(MI, OpenRanges, VarLocIDs, EntryValTransfers, KillSet);
1642 }
1643}
1644
1645void VarLocBasedLDV::transferWasmDef(MachineInstr &MI,
1646 OpenRangesSet &OpenRanges,
1647 VarLocMap &VarLocIDs) {
1648 // If this is not a Wasm local.set or local.tee, which sets local values,
1649 // return.
1650 int Index;
1651 int64_t Offset;
1652 if (!TII->isExplicitTargetIndexDef(MI, Index, Offset))
1653 return;
1654
1655 // Find the target indices killed by MI, and delete those variable locations
1656 // from the open range.
1657 VarLocsInRange KillSet;
1658 VarLoc::WasmLoc Loc{.Index: Index, .Offset: Offset};
1659 for (uint64_t ID : OpenRanges.getWasmVarLocs()) {
1660 LocIndex Idx = LocIndex::fromRawInteger(ID);
1661 const VarLoc &VL = VarLocIDs[Idx];
1662 assert(VL.containsWasmLocs() && "Broken VarLocSet?");
1663 if (VL.usesWasmLoc(WasmLocation: Loc))
1664 KillSet.insert(V: ID);
1665 }
1666 OpenRanges.erase(KillSet, VarLocIDs, Location: LocIndex::kWasmLocation);
1667}
1668
1669bool VarLocBasedLDV::isSpillInstruction(const MachineInstr &MI,
1670 MachineFunction *MF) {
1671 // TODO: Handle multiple stores folded into one.
1672 if (!MI.hasOneMemOperand())
1673 return false;
1674
1675 if (!MI.getSpillSize(TII) && !MI.getFoldedSpillSize(TII))
1676 return false; // This is not a spill instruction, since no valid size was
1677 // returned from either function.
1678
1679 return true;
1680}
1681
1682bool VarLocBasedLDV::isLocationSpill(const MachineInstr &MI,
1683 MachineFunction *MF, Register &Reg) {
1684 if (!isSpillInstruction(MI, MF))
1685 return false;
1686
1687 auto isKilledReg = [&](const MachineOperand MO, Register &Reg) {
1688 if (!MO.isReg() || !MO.isUse()) {
1689 Reg = 0;
1690 return false;
1691 }
1692 Reg = MO.getReg();
1693 return MO.isKill();
1694 };
1695
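  // Look for the spilled register: either the register is marked killed on
  // this instruction itself, or the immediately following instruction kills
  // it.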
1696 for (const MachineOperand &MO : MI.operands()) {
1697 // In a spill instruction generated by the InlineSpiller the spilled
1698 // register has its kill flag set.
1699 if (isKilledReg(MO, Reg))
1700 return true;
1701 if (Reg != 0) {
1702 // Check whether next instruction kills the spilled register.
1703 // FIXME: Current solution does not cover search for killed register in
1704 // bundles and instructions further down the chain.
1705 auto NextI = std::next(x: MI.getIterator());
1706 // If MI is the last instruction in its block, there is no next one to check.
1707 if (MI.getParent()->end() == NextI)
1708 continue;
1709 Register RegNext;
1710 for (const MachineOperand &MONext : NextI->operands()) {
1711 // Return true if we came across the register from the
1712 // previous spill instruction that is killed in NextI.
1713 if (isKilledReg(MONext, RegNext) && RegNext == Reg)
1714 return true;
1715 }
1716 }
1717 }
1718 // Return false if we didn't find spilled register.
1719 return false;
1720}
1721
1722std::optional<VarLocBasedLDV::VarLoc::SpillLoc>
1723VarLocBasedLDV::isRestoreInstruction(const MachineInstr &MI,
1724 MachineFunction *MF, Register &Reg) {
1725 if (!MI.hasOneMemOperand())
1726 return std::nullopt;
1727
1728 // FIXME: Handle folded restore instructions with more than one memory
1729 // operand.
1730 if (MI.getRestoreSize(TII)) {
1731 Reg = MI.getOperand(i: 0).getReg();
1732 return extractSpillBaseRegAndOffset(MI);
1733 }
1734 return std::nullopt;
1735}
1736
1737/// A spilled register may indicate that we have to end the current range of
1738/// a variable and create a new one for the spill location.
1739/// A restored register may indicate the reverse situation.
1740/// We don't want to insert any instructions in process(), so we just create
1741/// the DBG_VALUE without inserting it and keep track of it in \p Transfers.
1742/// It will be inserted into the BB when we're done iterating over the
1743/// instructions.
1744void VarLocBasedLDV::transferSpillOrRestoreInst(MachineInstr &MI,
1745 OpenRangesSet &OpenRanges,
1746 VarLocMap &VarLocIDs,
1747 TransferMap &Transfers) {
1748 MachineFunction *MF = MI.getMF();
1749 TransferKind TKind;
1750 Register Reg;
1751 std::optional<VarLoc::SpillLoc> Loc;
1752
1753 LLVM_DEBUG(dbgs() << "Examining instruction: "; MI.dump(););
1754
1755 // First, if there are any DBG_VALUEs pointing at a spill slot that is
1756 // written to, then close the variable location. The value in memory
1757 // will have changed.
1758 VarLocsInRange KillSet;
1759 if (isSpillInstruction(MI, MF)) {
1760 Loc = extractSpillBaseRegAndOffset(MI);
1761 for (uint64_t ID : OpenRanges.getSpillVarLocs()) {
1762 LocIndex Idx = LocIndex::fromRawInteger(ID);
1763 const VarLoc &VL = VarLocIDs[Idx];
1764 assert(VL.containsSpillLocs() && "Broken VarLocSet?");
1765 if (VL.usesSpillLoc(SpillLocation: *Loc)) {
1766 // This location is overwritten by the current instruction -- terminate
1767 // the open range, and insert an explicit DBG_VALUE $noreg.
1768 //
1769 // Doing this at a later stage would require re-interpreting all
1770 // DBG_VALUEs and DIExpressions to identify whether they point at
1771 // memory, and then analysing all memory writes to see if they
1772 // overwrite that memory, which is expensive.
1773 //
1774 // At this stage, we already know which DBG_VALUEs are for spills and
1775 // where they are located; it's best to handle overwrites now.
1776 KillSet.insert(V: ID);
1777 unsigned SpillLocIdx = VL.getSpillLocIdx(SpillLocation: *Loc);
1778 VarLoc::MachineLoc OldLoc = VL.Locs[SpillLocIdx];
1779 VarLoc UndefVL = VarLoc::CreateCopyLoc(OldVL: VL, OldML: OldLoc, NewReg: 0);
1780 LocIndices UndefLocIDs = VarLocIDs.insert(VL: UndefVL);
1781 Transfers.push_back(Elt: {.TransferInst: &MI, .LocationID: UndefLocIDs.back()});
1782 }
1783 }
1784 OpenRanges.erase(KillSet, VarLocIDs, Location: LocIndex::kSpillLocation);
1785 }
1786
1787 // Try to recognise spill and restore instructions that may create a new
1788 // variable location.
1789 if (isLocationSpill(MI, MF, Reg)) {
1790 TKind = TransferKind::TransferSpill;
1791 LLVM_DEBUG(dbgs() << "Recognized as spill: "; MI.dump(););
1792 LLVM_DEBUG(dbgs() << "Register: " << Reg << " " << printReg(Reg, TRI)
1793 << "\n");
1794 } else {
1795 if (!(Loc = isRestoreInstruction(MI, MF, Reg)))
1796 return;
1797 TKind = TransferKind::TransferRestore;
1798 LLVM_DEBUG(dbgs() << "Recognized as restore: "; MI.dump(););
1799 LLVM_DEBUG(dbgs() << "Register: " << Reg << " " << printReg(Reg, TRI)
1800 << "\n");
1801 }
1802 // Check if the register or spill location is the location of a debug value.
1803 auto TransferCandidates = OpenRanges.getEmptyVarLocRange();
1804 if (TKind == TransferKind::TransferSpill)
1805 TransferCandidates = OpenRanges.getRegisterVarLocs(Reg);
1806 else if (TKind == TransferKind::TransferRestore)
1807 TransferCandidates = OpenRanges.getSpillVarLocs();
1808 for (uint64_t ID : TransferCandidates) {
1809 LocIndex Idx = LocIndex::fromRawInteger(ID);
1810 const VarLoc &VL = VarLocIDs[Idx];
1811 unsigned LocIdx;
1812 if (TKind == TransferKind::TransferSpill) {
1813 assert(VL.usesReg(Reg) && "Broken VarLocSet?");
1814 LLVM_DEBUG(dbgs() << "Spilling Register " << printReg(Reg, TRI) << '('
1815 << VL.Var.getVariable()->getName() << ")\n");
1816 LocIdx = VL.getRegIdx(Reg);
1817 } else {
1818 assert(TKind == TransferKind::TransferRestore && VL.containsSpillLocs() &&
1819 "Broken VarLocSet?");
1820 if (!VL.usesSpillLoc(SpillLocation: *Loc))
1821 // The spill location is not the location of a debug value.
1822 continue;
1823 LLVM_DEBUG(dbgs() << "Restoring Register " << printReg(Reg, TRI) << '('
1824 << VL.Var.getVariable()->getName() << ")\n");
1825 LocIdx = VL.getSpillLocIdx(SpillLocation: *Loc);
1826 }
1827 VarLoc::MachineLoc MLoc = VL.Locs[LocIdx];
1828 insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, OldVarID: Idx, Kind: TKind,
1829 OldLoc: MLoc, NewReg: Reg);
1830 // FIXME: A comment should explain why it's correct to return early here,
1831 // if that is in fact correct.
1832 return;
1833 }
1834}
1835
1836/// If \p MI is a register copy instruction that copies a previously tracked
1837/// value from one register to a callee-saved register, create a new
1838/// DBG_VALUE instruction that describes the value in the destination register.
1839void VarLocBasedLDV::transferRegisterCopy(MachineInstr &MI,
1840 OpenRangesSet &OpenRanges,
1841 VarLocMap &VarLocIDs,
1842 TransferMap &Transfers) {
1843 auto DestSrc = TII->isCopyLikeInstr(MI);
1844 if (!DestSrc)
1845 return;
1846
1847 const MachineOperand *DestRegOp = DestSrc->Destination;
1848 const MachineOperand *SrcRegOp = DestSrc->Source;
1849
1850 if (!DestRegOp->isDef())
1851 return;
1852
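  // A register counts as callee saved if it, or any register aliasing it,
  // is in the callee-saved set computed for this function.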
1853 auto isCalleeSavedReg = [&](Register Reg) {
1854 for (MCRegAliasIterator RAI(Reg, TRI, true); RAI.isValid(); ++RAI)
1855 if (CalleeSavedRegs.test(Idx: *RAI))
1856 return true;
1857 return false;
1858 };
1859
1860 Register SrcReg = SrcRegOp->getReg();
1861 Register DestReg = DestRegOp->getReg();
1862
1863 // We want to recognize copies whose destination register is callee saved.
1864 // If a register that could be clobbered by a call were included, there
1865 // would be a good chance of it being clobbered soon, whereas the
1866 // callee-saved destination is likely to stay unclobbered for longer, even
1867 // if the source register is killed.
1868 if (!isCalleeSavedReg(DestReg))
1869 return;
1870
1871 // Remember an entry value movement. If we encounter a new debug value of
1872 // a parameter that only describes the value being moved around, rather than
1873 // modified, we are still able to use the entry value if needed.
1874 if (isRegOtherThanSPAndFP(Op: *DestRegOp, MI, TRI)) {
1875 for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) {
1876 LocIndex Idx = LocIndex::fromRawInteger(ID);
1877 const VarLoc &VL = VarLocIDs[Idx];
1878 if (VL.isEntryValueBackupReg(Reg: SrcReg)) {
1879 LLVM_DEBUG(dbgs() << "Copy of the entry value: "; MI.dump(););
1880 VarLoc EntryValLocCopyBackup =
1881 VarLoc::CreateEntryCopyBackupLoc(MI: VL.MI, EntryExpr: VL.Expr, NewReg: DestReg);
1882 // Stop tracking the original entry value.
1883 OpenRanges.erase(VL);
1884
1885 // Start tracking the entry value copy.
1886 LocIndices EntryValCopyLocIDs = VarLocIDs.insert(VL: EntryValLocCopyBackup);
1887 OpenRanges.insert(VarLocIDs: EntryValCopyLocIDs, VL: EntryValLocCopyBackup);
1888 break;
1889 }
1890 }
1891 }
1892
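  // Only transfer an open range to the destination register if the copy kills
  // its source register; otherwise the existing range in the source register
  // simply stays open.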
1893 if (!SrcRegOp->isKill())
1894 return;
1895
1896 for (uint64_t ID : OpenRanges.getRegisterVarLocs(Reg: SrcReg)) {
1897 LocIndex Idx = LocIndex::fromRawInteger(ID);
1898 assert(VarLocIDs[Idx].usesReg(SrcReg) && "Broken VarLocSet?");
1899 VarLoc::MachineLocValue Loc;
1900 Loc.RegNo = SrcReg;
1901 VarLoc::MachineLoc MLoc{.Kind: VarLoc::MachineLocKind::RegisterKind, .Value: Loc};
1902 insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, OldVarID: Idx,
1903 Kind: TransferKind::TransferCopy, OldLoc: MLoc, NewReg: DestReg);
1904 // FIXME: A comment should explain why it's correct to return early here,
1905 // if that is in fact correct.
1906 return;
1907 }
1908}
1909
1910/// Terminate all open ranges at the end of the current basic block.
1911bool VarLocBasedLDV::transferTerminator(MachineBasicBlock *CurMBB,
1912 OpenRangesSet &OpenRanges,
1913 VarLocInMBB &OutLocs,
1914 const VarLocMap &VarLocIDs) {
1915 bool Changed = false;
1916 LLVM_DEBUG({
1917 VarVec VarLocs;
1918 OpenRanges.getUniqueVarLocs(VarLocs, VarLocIDs);
1919 for (VarLoc &VL : VarLocs) {
1920 // Copy OpenRanges to OutLocs, if not already present.
1921 dbgs() << "Add to OutLocs in MBB #" << CurMBB->getNumber() << ": ";
1922 VL.dump(TRI, TII);
1923 }
1924 });
1925 VarLocSet &VLS = getVarLocsInMBB(MBB: CurMBB, Locs&: OutLocs);
1926 Changed = VLS != OpenRanges.getVarLocs();
1927 // New OutLocs set may be different due to spill, restore or register
1928 // copy instruction processing.
1929 if (Changed)
1930 VLS = OpenRanges.getVarLocs();
1931 OpenRanges.clear();
1932 return Changed;
1933}
1934
1935/// Accumulate a mapping between each DILocalVariable fragment and other
1936/// fragments of that DILocalVariable which overlap. This reduces work during
1937/// the data-flow stage from "Find any overlapping fragments" to "Check if the
1938/// known-to-overlap fragments are present".
1939/// \param MI A previously unprocessed DEBUG_VALUE instruction to analyze for
1940/// fragment usage.
1941/// \param SeenFragments Map from DILocalVariable to all fragments of that
1942/// Variable which are known to exist.
1943/// \param OverlappingFragments The overlap map being constructed, from one
1944/// Var/Fragment pair to a vector of fragments known to overlap.
1945void VarLocBasedLDV::accumulateFragmentMap(MachineInstr &MI,
1946 VarToFragments &SeenFragments,
1947 OverlapMap &OverlappingFragments) {
1948 DebugVariable MIVar(MI.getDebugVariable(), MI.getDebugExpression(),
1949 MI.getDebugLoc()->getInlinedAt());
1950 FragmentInfo ThisFragment = MIVar.getFragmentOrDefault();
1951
1952 // If this is the first sighting of this variable, then we are guaranteed
1953 // there are currently no overlapping fragments either. Initialize the set
1954 // of seen fragments, record no overlaps for the current one, and return.
1955 auto SeenIt = SeenFragments.find(Val: MIVar.getVariable());
1956 if (SeenIt == SeenFragments.end()) {
1957 SmallSet<FragmentInfo, 4> OneFragment;
1958 OneFragment.insert(V: ThisFragment);
1959 SeenFragments.insert(KV: {MIVar.getVariable(), OneFragment});
1960
1961 OverlappingFragments.insert(KV: {{MIVar.getVariable(), ThisFragment}, {}});
1962 return;
1963 }
1964
1965 // If this particular Variable/Fragment pair already exists in the overlap
1966 // map, it has already been accounted for.
1967 auto IsInOLapMap =
1968 OverlappingFragments.insert(KV: {{MIVar.getVariable(), ThisFragment}, {}});
1969 if (!IsInOLapMap.second)
1970 return;
1971
1972 auto &ThisFragmentsOverlaps = IsInOLapMap.first->second;
1973 auto &AllSeenFragments = SeenIt->second;
1974
1975 // Otherwise, examine all other seen fragments for this variable, with "this"
1976 // fragment being a previously unseen fragment. Record any pair of
1977 // overlapping fragments.
1978 for (const auto &ASeenFragment : AllSeenFragments) {
1979 // Does this previously seen fragment overlap?
1980 if (DIExpression::fragmentsOverlap(A: ThisFragment, B: ASeenFragment)) {
1981 // Yes: Mark the current fragment as being overlapped.
1982 ThisFragmentsOverlaps.push_back(Elt: ASeenFragment);
1983 // Mark the previously seen fragment as being overlapped by the current
1984 // one.
1985 auto ASeenFragmentsOverlaps =
1986 OverlappingFragments.find(Val: {MIVar.getVariable(), ASeenFragment});
1987 assert(ASeenFragmentsOverlaps != OverlappingFragments.end() &&
1988 "Previously seen var fragment has no vector of overlaps");
1989 ASeenFragmentsOverlaps->second.push_back(Elt: ThisFragment);
1990 }
1991 }
1992
1993 AllSeenFragments.insert(V: ThisFragment);
1994}
1995
1996/// Apply all transfer functions to \p MI, updating OpenRanges accordingly.
1997void VarLocBasedLDV::process(MachineInstr &MI, OpenRangesSet &OpenRanges,
1998 VarLocMap &VarLocIDs, TransferMap &Transfers,
1999 InstToEntryLocMap &EntryValTransfers,
2000 RegDefToInstMap &RegSetInstrs) {
2001 if (!MI.isDebugInstr())
2002 LastNonDbgMI = &MI;
2003 transferDebugValue(MI, OpenRanges, VarLocIDs, EntryValTransfers,
2004 RegSetInstrs);
2005 transferRegisterDef(MI, OpenRanges, VarLocIDs, EntryValTransfers,
2006 RegSetInstrs);
2007 transferWasmDef(MI, OpenRanges, VarLocIDs);
2008 transferRegisterCopy(MI, OpenRanges, VarLocIDs, Transfers);
2009 transferSpillOrRestoreInst(MI, OpenRanges, VarLocIDs, Transfers);
2010}
2011
2012/// This routine joins the analysis results of all incoming edges of @MBB: a
2013/// variable location is considered live-in to @MBB only if the same source
2014/// variable resides in that location at the exit of every visited predecessor.
2015bool VarLocBasedLDV::join(
2016 MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs,
2017 const VarLocMap &VarLocIDs,
2018 SmallPtrSet<const MachineBasicBlock *, 16> &Visited,
2019 SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks) {
2020 LLVM_DEBUG(dbgs() << "join MBB: " << MBB.getNumber() << "\n");
2021
2022 VarLocSet InLocsT(Alloc); // Temporary incoming locations.
2023
2024 // For all predecessors of this MBB, find the set of VarLocs that
2025 // can be joined.
2026 int NumVisited = 0;
2027 for (auto *p : MBB.predecessors()) {
2028 // Ignore backedges if we have not visited the predecessor yet. As the
2029 // predecessor hasn't yet had locations propagated into it, most locations
2030 // will not yet be valid, so treat them as all being uninitialized and
2031 // potentially valid. If a location guessed to be correct here is
2032 // invalidated later, we will remove it when we revisit this block.
2033 if (!Visited.count(Ptr: p)) {
2034 LLVM_DEBUG(dbgs() << " ignoring unvisited pred MBB: " << p->getNumber()
2035 << "\n");
2036 continue;
2037 }
2038 auto OL = OutLocs.find(Val: p);
2039 // The join is empty if any visited predecessor has no OutLocs recorded yet.
2040 if (OL == OutLocs.end())
2041 return false;
2042
2043 // Just copy over the Out locs to incoming locs for the first visited
2044 // predecessor, and for all other predecessors join the Out locs.
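    // The join is set intersection: a location survives only if every visited
    // predecessor makes it available on exit.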
2045 VarLocSet &OutLocVLS = *OL->second;
2046 if (!NumVisited)
2047 InLocsT = OutLocVLS;
2048 else
2049 InLocsT &= OutLocVLS;
2050
2051 LLVM_DEBUG({
2052 if (!InLocsT.empty()) {
2053 VarVec VarLocs;
2054 collectAllVarLocs(VarLocs, InLocsT, VarLocIDs);
2055 for (const VarLoc &VL : VarLocs)
2056 dbgs() << " gathered candidate incoming var: "
2057 << VL.Var.getVariable()->getName() << "\n";
2058 }
2059 });
2060
2061 NumVisited++;
2062 }
2063
2064 // Filter out DBG_VALUES that are out of scope.
2065 VarLocSet KillSet(Alloc);
2066 bool IsArtificial = ArtificialBlocks.count(Ptr: &MBB);
2067 if (!IsArtificial) {
2068 for (uint64_t ID : InLocsT) {
2069 LocIndex Idx = LocIndex::fromRawInteger(ID);
2070 if (!VarLocIDs[Idx].dominates(LS, MBB)) {
2071 KillSet.set(ID);
2072 LLVM_DEBUG({
2073 auto Name = VarLocIDs[Idx].Var.getVariable()->getName();
2074 dbgs() << " killing " << Name << ", it doesn't dominate MBB\n";
2075 });
2076 }
2077 }
2078 }
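  // Remove the out-of-scope locations from the candidate live-in set, i.e.
  // InLocsT &= ~KillSet.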
2079 InLocsT.intersectWithComplement(Other: KillSet);
2080
2081 // As we are processing blocks in reverse post-order we
2082 // should have processed at least one predecessor, unless it
2083 // is the entry block which has no predecessor.
2084 assert((NumVisited || MBB.pred_empty()) &&
2085 "Should have processed at least one predecessor");
2086
2087 VarLocSet &ILS = getVarLocsInMBB(MBB: &MBB, Locs&: InLocs);
2088 bool Changed = false;
2089 if (ILS != InLocsT) {
2090 ILS = InLocsT;
2091 Changed = true;
2092 }
2093
2094 return Changed;
2095}
2096
2097void VarLocBasedLDV::flushPendingLocs(VarLocInMBB &PendingInLocs,
2098 VarLocMap &VarLocIDs) {
2099 // PendingInLocs records all locations propagated into blocks, which have
2100 // not had DBG_VALUE insts created. Go through and create those insts now.
2101 for (auto &Iter : PendingInLocs) {
2102 // Map is keyed on a constant pointer, unwrap it so we can insert insts.
2103 auto &MBB = const_cast<MachineBasicBlock &>(*Iter.first);
2104 VarLocSet &Pending = *Iter.second;
2105
2106 SmallVector<VarLoc, 32> VarLocs;
2107 collectAllVarLocs(Collected&: VarLocs, CollectFrom: Pending, VarLocIDs);
2108
2109 for (VarLoc DiffIt : VarLocs) {
2110 // The ID location is live-in to MBB -- work out what kind of machine
2111 // location it is and create a DBG_VALUE.
2112 if (DiffIt.isEntryBackupLoc())
2113 continue;
2114 MachineInstr *MI = DiffIt.BuildDbgValue(MF&: *MBB.getParent());
2115 MBB.insert(I: MBB.instr_begin(), M: MI);
2116
2117 (void)MI;
2118 LLVM_DEBUG(dbgs() << "Inserted: "; MI->dump(););
2119 }
2120 }
2121}
2122
2123bool VarLocBasedLDV::isEntryValueCandidate(
2124 const MachineInstr &MI, const DefinedRegsSet &DefinedRegs) const {
2125 assert(MI.isDebugValue() && "This must be DBG_VALUE.");
2126
2127 // TODO: Add support for local variables that are expressed in terms of
2128 // parameter entry values.
2129 // TODO: Add support for modified arguments that can be expressed
2130 // by using their entry values.
2131 auto *DIVar = MI.getDebugVariable();
2132 if (!DIVar->isParameter())
2133 return false;
2134
2135 // Do not consider parameters that belong to an inlined function.
2136 if (MI.getDebugLoc()->getInlinedAt())
2137 return false;
2138
2139 // Only consider parameters that are described using registers. Parameters
2140 // that are passed on the stack are not yet supported, so ignore debug
2141 // values that are described by the frame or stack pointer.
2142 if (!isRegOtherThanSPAndFP(Op: MI.getDebugOperand(Index: 0), MI, TRI))
2143 return false;
2144
2145 // If a parameter's value has been propagated from the caller, then the
2146 // parameter's DBG_VALUE may be described using a register defined by some
2147 // instruction in the entry block, in which case we shouldn't create an
2148 // entry value.
2149 if (DefinedRegs.count(V: MI.getDebugOperand(Index: 0).getReg()))
2150 return false;
2151
2152 // TODO: Add support for parameters that have pre-existing debug expressions
2153 // (e.g. fragments).
2154 // A simple deref expression is equivalent to an indirect debug value.
2155 const DIExpression *Expr = MI.getDebugExpression();
2156 if (Expr->getNumElements() > 0 && !Expr->isDeref())
2157 return false;
2158
2159 return true;
2160}
2161
2162/// Collect all register defines (including aliases) for the given instruction.
2163static void collectRegDefs(const MachineInstr &MI, DefinedRegsSet &Regs,
2164 const TargetRegisterInfo *TRI) {
2165 for (const MachineOperand &MO : MI.all_defs()) {
2166 if (MO.getReg() && MO.getReg().isPhysical()) {
2167 Regs.insert(V: MO.getReg());
2168 for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
2169 Regs.insert(V: *AI);
2170 }
2171 }
2172}
2173
2174/// This routine records the entry values of function parameters. The values
2175/// can be used as backup values. If we lose track of an unmodified
2176/// parameter, its backup value will be used as its primary location.
2177void VarLocBasedLDV::recordEntryValue(const MachineInstr &MI,
2178 const DefinedRegsSet &DefinedRegs,
2179 OpenRangesSet &OpenRanges,
2180 VarLocMap &VarLocIDs) {
2181 if (TPC) {
2182 auto &TM = TPC->getTM<TargetMachine>();
2183 if (!TM.Options.ShouldEmitDebugEntryValues())
2184 return;
2185 }
2186
2187 DebugVariable V(MI.getDebugVariable(), MI.getDebugExpression(),
2188 MI.getDebugLoc()->getInlinedAt());
2189
2190 if (!isEntryValueCandidate(MI, DefinedRegs) ||
2191 OpenRanges.getEntryValueBackup(Var: V))
2192 return;
2193
2194 LLVM_DEBUG(dbgs() << "Creating the backup entry location: "; MI.dump(););
2195
2196 // Create the entry value and use it as a backup location. The backup stays
2197 // valid until the parameter is modified.
2198 DIExpression *NewExpr =
2199 DIExpression::prepend(Expr: MI.getDebugExpression(), Flags: DIExpression::EntryValue);
2200 VarLoc EntryValLocAsBackup = VarLoc::CreateEntryBackupLoc(MI, EntryExpr: NewExpr);
2201 LocIndices EntryValLocIDs = VarLocIDs.insert(VL: EntryValLocAsBackup);
2202 OpenRanges.insert(VarLocIDs: EntryValLocIDs, VL: EntryValLocAsBackup);
2203}
2204
2205/// Calculate the liveness information for the given machine function and
2206/// extend ranges across basic blocks.
2207bool VarLocBasedLDV::ExtendRanges(MachineFunction &MF,
2208 MachineDominatorTree *DomTree,
2209 TargetPassConfig *TPC, unsigned InputBBLimit,
2210 unsigned InputDbgValLimit) {
2211 (void)DomTree;
2212 LLVM_DEBUG(dbgs() << "\nDebug Range Extension: " << MF.getName() << "\n");
2213
2214 if (!MF.getFunction().getSubprogram())
2215 // VarLocBasedLDV will already have removed all DBG_VALUEs.
2216 return false;
2217
2218 // Skip functions from NoDebug compilation units.
2219 if (MF.getFunction().getSubprogram()->getUnit()->getEmissionKind() ==
2220 DICompileUnit::NoDebug)
2221 return false;
2222
2223 TRI = MF.getSubtarget().getRegisterInfo();
2224 TII = MF.getSubtarget().getInstrInfo();
2225 TFI = MF.getSubtarget().getFrameLowering();
2226 TFI->getCalleeSaves(MF, SavedRegs&: CalleeSavedRegs);
2227 this->TPC = TPC;
2228 LS.initialize(MF);
2229
2230 bool Changed = false;
2231 bool OLChanged = false;
2232 bool MBBJoined = false;
2233
2234 VarLocMap VarLocIDs; // Map VarLoc<>unique ID for use in bitvectors.
2235 OverlapMap OverlapFragments; // Map of overlapping variable fragments.
2236 OpenRangesSet OpenRanges(Alloc, OverlapFragments);
2237 // Ranges that are open until end of bb.
2238 VarLocInMBB OutLocs; // Ranges that exist beyond bb.
2239 VarLocInMBB InLocs; // Ranges that are incoming after joining.
2240 TransferMap Transfers; // DBG_VALUEs associated with transfers (such as
2241 // spills, copies and restores).
2242 // Map responsible MI to attached Transfer emitted from Backup Entry Value.
2243 InstToEntryLocMap EntryValTransfers;
2244 // Map a Register to the last MI which clobbered it.
2245 RegDefToInstMap RegSetInstrs;
2246
2247 VarToFragments SeenFragments;
2248
2249 // Blocks which are artificial, i.e. blocks which exclusively contain
2250 // instructions without locations, or with line 0 locations.
2251 SmallPtrSet<const MachineBasicBlock *, 16> ArtificialBlocks;
2252
2253 DenseMap<unsigned int, MachineBasicBlock *> OrderToBB;
2254 DenseMap<MachineBasicBlock *, unsigned int> BBToOrder;
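  // Blocks are identified by their RPO number in the worklists below; using a
  // min-heap means each pass over the function visits blocks in reverse
  // post-order.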
2255 std::priority_queue<unsigned int, std::vector<unsigned int>,
2256 std::greater<unsigned int>>
2257 Worklist;
2258 std::priority_queue<unsigned int, std::vector<unsigned int>,
2259 std::greater<unsigned int>>
2260 Pending;
2261
2262 // Set of register defines that are seen when traversing the entry block
2263 // looking for debug entry value candidates.
2264 DefinedRegsSet DefinedRegs;
2265
2266 // Only for the entry MBB, collect DBG_VALUEs representing
2267 // function parameters in order to generate debug entry values for them.
2268 MachineBasicBlock &First_MBB = *(MF.begin());
2269 for (auto &MI : First_MBB) {
2270 collectRegDefs(MI, Regs&: DefinedRegs, TRI);
2271 if (MI.isDebugValue())
2272 recordEntryValue(MI, DefinedRegs, OpenRanges, VarLocIDs);
2273 }
2274
2275 // Initialize per-block structures and scan for fragment overlaps.
2276 for (auto &MBB : MF)
2277 for (auto &MI : MBB)
2278 if (MI.isDebugValue())
2279 accumulateFragmentMap(MI, SeenFragments, OverlappingFragments&: OverlapFragments);
2280
2281 auto hasNonArtificialLocation = [](const MachineInstr &MI) -> bool {
2282 if (const DebugLoc &DL = MI.getDebugLoc())
2283 return DL.getLine() != 0;
2284 return false;
2285 };
2286 for (auto &MBB : MF)
2287 if (none_of(Range: MBB.instrs(), P: hasNonArtificialLocation))
2288 ArtificialBlocks.insert(Ptr: &MBB);
2289
2290 LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
2291 "OutLocs after initialization", dbgs()));
2292
2293 ReversePostOrderTraversal<MachineFunction *> RPOT(&MF);
2294 unsigned int RPONumber = 0;
2295 for (MachineBasicBlock *MBB : RPOT) {
2296 OrderToBB[RPONumber] = MBB;
2297 BBToOrder[MBB] = RPONumber;
2298 Worklist.push(x: RPONumber);
2299 ++RPONumber;
2300 }
2301
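  // For very large functions, only run the analysis if the number of input
  // DBG_VALUEs is also below its limit; otherwise bail out to keep compile
  // time in check.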
2302 if (RPONumber > InputBBLimit) {
2303 unsigned NumInputDbgValues = 0;
2304 for (auto &MBB : MF)
2305 for (auto &MI : MBB)
2306 if (MI.isDebugValue())
2307 ++NumInputDbgValues;
2308 if (NumInputDbgValues > InputDbgValLimit) {
2309 LLVM_DEBUG(dbgs() << "Disabling VarLocBasedLDV: " << MF.getName()
2310 << " has " << RPONumber << " basic blocks and "
2311 << NumInputDbgValues
2312 << " input DBG_VALUEs, exceeding limits.\n");
2313 return false;
2314 }
2315 }
2316
2317 // This is a standard "union of predecessor outs" dataflow problem.
2318 // To solve it, we perform join() and process() using the two worklist method
2319 // until the ranges converge.
2320 // Ranges have converged when both worklists are empty.
2321 SmallPtrSet<const MachineBasicBlock *, 16> Visited;
2322 while (!Worklist.empty() || !Pending.empty()) {
2323 // We track what is on the pending worklist to avoid inserting the same
2324 // thing twice. We could avoid this with a custom priority queue, but this
2325 // is probably not worth it.
2326 SmallPtrSet<MachineBasicBlock *, 16> OnPending;
2327 LLVM_DEBUG(dbgs() << "Processing Worklist\n");
2328 while (!Worklist.empty()) {
2329 MachineBasicBlock *MBB = OrderToBB[Worklist.top()];
2330 Worklist.pop();
2331 MBBJoined = join(MBB&: *MBB, OutLocs, InLocs, VarLocIDs, Visited,
2332 ArtificialBlocks);
2333 MBBJoined |= Visited.insert(Ptr: MBB).second;
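      // A block is always processed the first time it is visited, even if
      // join() found nothing new to propagate into it.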
2334 if (MBBJoined) {
2335 MBBJoined = false;
2336 Changed = true;
2337 // Now that we have started to extend ranges across BBs we need to
2338 // examine spill, copy and restore instructions to see whether they
2339 // operate with registers that correspond to user variables.
2340 // First load any pending inlocs.
2341 OpenRanges.insertFromLocSet(ToLoad: getVarLocsInMBB(MBB, Locs&: InLocs), Map: VarLocIDs);
2342 LastNonDbgMI = nullptr;
2343 RegSetInstrs.clear();
2344 for (auto &MI : *MBB)
2345 process(MI, OpenRanges, VarLocIDs, Transfers, EntryValTransfers,
2346 RegSetInstrs);
2347 OLChanged |= transferTerminator(CurMBB: MBB, OpenRanges, OutLocs, VarLocIDs);
2348
2349 LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
2350 "OutLocs after propagating", dbgs()));
2351 LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs,
2352 "InLocs after propagating", dbgs()));
2353
2354 if (OLChanged) {
2355 OLChanged = false;
2356 for (auto *s : MBB->successors())
2357 if (OnPending.insert(Ptr: s).second) {
2358 Pending.push(x: BBToOrder[s]);
2359 }
2360 }
2361 }
2362 }
2363 Worklist.swap(pq&: Pending);
2364 // At this point Pending must be empty, since it received the contents of
2365 // Worklist, which was just drained by the inner loop.
2366 assert(Pending.empty() && "Pending should be empty");
2367 }
2368
2369 // Add any DBG_VALUE instructions created by location transfers.
2370 for (auto &TR : Transfers) {
2371 assert(!TR.TransferInst->isTerminator() &&
2372 "Cannot insert DBG_VALUE after terminator");
2373 MachineBasicBlock *MBB = TR.TransferInst->getParent();
2374 const VarLoc &VL = VarLocIDs[TR.LocationID];
2375 MachineInstr *MI = VL.BuildDbgValue(MF);
2376 MBB->insertAfterBundle(I: TR.TransferInst->getIterator(), MI);
2377 }
2378 Transfers.clear();
2379
2380 // Add DBG_VALUEs created using Backup Entry Value location.
2381 for (auto &TR : EntryValTransfers) {
2382 MachineInstr *TRInst = const_cast<MachineInstr *>(TR.first);
2383 assert(!TRInst->isTerminator() &&
2384 "Cannot insert DBG_VALUE after terminator");
2385 MachineBasicBlock *MBB = TRInst->getParent();
2386 const VarLoc &VL = VarLocIDs[TR.second];
2387 MachineInstr *MI = VL.BuildDbgValue(MF);
2388 MBB->insertAfterBundle(I: TRInst->getIterator(), MI);
2389 }
2390 EntryValTransfers.clear();
2391
2392 // Deferred inlocs will not have had any DBG_VALUE insts created; do
2393 // that now.
2394 flushPendingLocs(PendingInLocs&: InLocs, VarLocIDs);
2395
2396 LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs, "Final OutLocs", dbgs()));
2397 LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs, "Final InLocs", dbgs()));
2398 return Changed;
2399}
2400
2401LDVImpl *llvm::makeVarLocBasedLiveDebugValues() {
2402 return new VarLocBasedLDV();
2403}
2406
