//===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements methods that make it really easy to deal with intrinsic
// functions.
//
// All intrinsic function calls are instances of the call instruction, so these
// are all subclasses of the CallInst class. Note that none of these classes
// has state or virtual methods, which is an important part of this gross/neat
// hack working.
//
// In some cases, arguments to intrinsics need to be generic and are defined as
// type pointer to empty struct { }*. To access the real item of interest the
// cast instruction needs to be stripped away.
//
//===----------------------------------------------------------------------===//
22
23#include "llvm/IR/IntrinsicInst.h"
24#include "llvm/ADT/StringSwitch.h"
25#include "llvm/IR/Constants.h"
26#include "llvm/IR/DebugInfoMetadata.h"
27#include "llvm/IR/Metadata.h"
28#include "llvm/IR/Module.h"
29#include "llvm/IR/Operator.h"
30#include "llvm/IR/PatternMatch.h"
31#include "llvm/IR/Statepoint.h"
32#include <optional>
33
34using namespace llvm;
35
36bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
37 switch (IID) {
38 case Intrinsic::objc_autorelease:
39 case Intrinsic::objc_autoreleasePoolPop:
40 case Intrinsic::objc_autoreleasePoolPush:
41 case Intrinsic::objc_autoreleaseReturnValue:
42 case Intrinsic::objc_copyWeak:
43 case Intrinsic::objc_destroyWeak:
44 case Intrinsic::objc_initWeak:
45 case Intrinsic::objc_loadWeak:
46 case Intrinsic::objc_loadWeakRetained:
47 case Intrinsic::objc_moveWeak:
48 case Intrinsic::objc_release:
49 case Intrinsic::objc_retain:
50 case Intrinsic::objc_retainAutorelease:
51 case Intrinsic::objc_retainAutoreleaseReturnValue:
52 case Intrinsic::objc_retainAutoreleasedReturnValue:
53 case Intrinsic::objc_retainBlock:
54 case Intrinsic::objc_storeStrong:
55 case Intrinsic::objc_storeWeak:
56 case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
57 case Intrinsic::objc_retainedObject:
58 case Intrinsic::objc_unretainedObject:
59 case Intrinsic::objc_unretainedPointer:
60 case Intrinsic::objc_retain_autorelease:
61 case Intrinsic::objc_sync_enter:
62 case Intrinsic::objc_sync_exit:
63 return true;
64 default:
65 return false;
66 }
67}
68
69//===----------------------------------------------------------------------===//
70/// DbgVariableIntrinsic - This is the common base class for debug info
71/// intrinsics for variables.
72///
73
74iterator_range<location_op_iterator> RawLocationWrapper::location_ops() const {
75 Metadata *MD = getRawLocation();
76 assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
77 // If operand is ValueAsMetadata, return a range over just that operand.
78 if (auto *VAM = dyn_cast<ValueAsMetadata>(Val: MD)) {
79 return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
80 }
81 // If operand is DIArgList, return a range over its args.
82 if (auto *AL = dyn_cast<DIArgList>(Val: MD))
83 return {location_op_iterator(AL->args_begin()),
84 location_op_iterator(AL->args_end())};
85 // Operand must be an empty metadata tuple, so return empty iterator.
86 return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
87 location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
88}
89
90iterator_range<location_op_iterator>
91DbgVariableIntrinsic::location_ops() const {
92 return getWrappedLocation().location_ops();
93}
94
95Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
96 return getWrappedLocation().getVariableLocationOp(OpIdx);
97}
98
99Value *RawLocationWrapper::getVariableLocationOp(unsigned OpIdx) const {
100 Metadata *MD = getRawLocation();
101 assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
102 if (auto *AL = dyn_cast<DIArgList>(Val: MD))
103 return AL->getArgs()[OpIdx]->getValue();
104 if (isa<MDNode>(Val: MD))
105 return nullptr;
106 assert(
107 isa<ValueAsMetadata>(MD) &&
108 "Attempted to get location operand from DbgVariableIntrinsic with none.");
109 auto *V = cast<ValueAsMetadata>(Val: MD);
110 assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
111 "single location operand.");
112 return V->getValue();
113}
114
115static ValueAsMetadata *getAsMetadata(Value *V) {
116 return isa<MetadataAsValue>(Val: V) ? dyn_cast<ValueAsMetadata>(
117 Val: cast<MetadataAsValue>(Val: V)->getMetadata())
118 : ValueAsMetadata::get(V);
119}
120
121void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
122 Value *NewValue) {
123 // If OldValue is used as the address part of a dbg.assign intrinsic replace
124 // it with NewValue and return true.
125 auto ReplaceDbgAssignAddress = [this, OldValue, NewValue]() -> bool {
126 auto *DAI = dyn_cast<DbgAssignIntrinsic>(Val: this);
127 if (!DAI || OldValue != DAI->getAddress())
128 return false;
129 DAI->setAddress(NewValue);
130 return true;
131 };
132 bool DbgAssignAddrReplaced = ReplaceDbgAssignAddress();
133 (void)DbgAssignAddrReplaced;
134
135 assert(NewValue && "Values must be non-null");
136 auto Locations = location_ops();
137 auto OldIt = find(Range&: Locations, Val: OldValue);
138 if (OldIt == Locations.end()) {
139 assert(DbgAssignAddrReplaced &&
140 "OldValue must be dbg.assign addr if unused in DIArgList");
141 return;
142 }
143
144 assert(OldIt != Locations.end() && "OldValue must be a current location");
145 if (!hasArgList()) {
146 Value *NewOperand = isa<MetadataAsValue>(Val: NewValue)
147 ? NewValue
148 : MetadataAsValue::get(
149 Context&: getContext(), MD: ValueAsMetadata::get(V: NewValue));
150 return setArgOperand(i: 0, v: NewOperand);
151 }
152 SmallVector<ValueAsMetadata *, 4> MDs;
153 ValueAsMetadata *NewOperand = getAsMetadata(V: NewValue);
154 for (auto *VMD : Locations)
155 MDs.push_back(Elt: VMD == *OldIt ? NewOperand : getAsMetadata(V: VMD));
156 setArgOperand(
157 i: 0, v: MetadataAsValue::get(Context&: getContext(), MD: DIArgList::get(Context&: getContext(), Args: MDs)));
158}
159void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
160 Value *NewValue) {
161 assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
162 if (!hasArgList()) {
163 Value *NewOperand = isa<MetadataAsValue>(Val: NewValue)
164 ? NewValue
165 : MetadataAsValue::get(
166 Context&: getContext(), MD: ValueAsMetadata::get(V: NewValue));
167 return setArgOperand(i: 0, v: NewOperand);
168 }
169 SmallVector<ValueAsMetadata *, 4> MDs;
170 ValueAsMetadata *NewOperand = getAsMetadata(V: NewValue);
171 for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
172 MDs.push_back(Elt: Idx == OpIdx ? NewOperand
173 : getAsMetadata(V: getVariableLocationOp(OpIdx: Idx)));
174 setArgOperand(
175 i: 0, v: MetadataAsValue::get(Context&: getContext(), MD: DIArgList::get(Context&: getContext(), Args: MDs)));
176}
177
178void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
179 DIExpression *NewExpr) {
180 assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
181 NewValues.size()) &&
182 "NewExpr for debug variable intrinsic does not reference every "
183 "location operand.");
184 assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
185 setArgOperand(i: 2, v: MetadataAsValue::get(Context&: getContext(), MD: NewExpr));
186 SmallVector<ValueAsMetadata *, 4> MDs;
187 for (auto *VMD : location_ops())
188 MDs.push_back(Elt: getAsMetadata(V: VMD));
189 for (auto *VMD : NewValues)
190 MDs.push_back(Elt: getAsMetadata(V: VMD));
191 setArgOperand(
192 i: 0, v: MetadataAsValue::get(Context&: getContext(), MD: DIArgList::get(Context&: getContext(), Args: MDs)));
193}
194
/// Return the size in bits described by this intrinsic: the fragment size if
/// the expression has one, otherwise the size of the whole variable.
std::optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
  if (auto Fragment = getExpression()->getFragmentInfo())
    return Fragment->SizeInBits;
  return getVariable()->getSizeInBits();
}
200
201Value *DbgAssignIntrinsic::getAddress() const {
202 auto *MD = getRawAddress();
203 if (auto *V = dyn_cast<ValueAsMetadata>(Val: MD))
204 return V->getValue();
205
206 // When the value goes to null, it gets replaced by an empty MDNode.
207 assert(!cast<MDNode>(MD)->getNumOperands() && "Expected an empty MDNode");
208 return nullptr;
209}
210
211void DbgAssignIntrinsic::setAssignId(DIAssignID *New) {
212 setOperand(i: OpAssignID, v: MetadataAsValue::get(Context&: getContext(), MD: New));
213}
214
215void DbgAssignIntrinsic::setAddress(Value *V) {
216 setOperand(i: OpAddress,
217 v: MetadataAsValue::get(Context&: getContext(), MD: ValueAsMetadata::get(V)));
218}
219
220void DbgAssignIntrinsic::setKillAddress() {
221 if (isKillAddress())
222 return;
223 setAddress(UndefValue::get(T: getAddress()->getType()));
224}
225
226bool DbgAssignIntrinsic::isKillAddress() const {
227 Value *Addr = getAddress();
228 return !Addr || isa<UndefValue>(Val: Addr);
229}
230
231void DbgAssignIntrinsic::setValue(Value *V) {
232 setOperand(i: OpValue,
233 v: MetadataAsValue::get(Context&: getContext(), MD: ValueAsMetadata::get(V)));
234}
235
236int llvm::Intrinsic::lookupLLVMIntrinsicByName(ArrayRef<const char *> NameTable,
237 StringRef Name) {
238 assert(Name.starts_with("llvm.") && "Unexpected intrinsic prefix");
239
240 // Do successive binary searches of the dotted name components. For
241 // "llvm.gc.experimental.statepoint.p1i8.p1i32", we will find the range of
242 // intrinsics starting with "llvm.gc", then "llvm.gc.experimental", then
243 // "llvm.gc.experimental.statepoint", and then we will stop as the range is
244 // size 1. During the search, we can skip the prefix that we already know is
245 // identical. By using strncmp we consider names with differing suffixes to
246 // be part of the equal range.
247 size_t CmpEnd = 4; // Skip the "llvm" component.
248 const char *const *Low = NameTable.begin();
249 const char *const *High = NameTable.end();
250 const char *const *LastLow = Low;
251 while (CmpEnd < Name.size() && High - Low > 0) {
252 size_t CmpStart = CmpEnd;
253 CmpEnd = Name.find(C: '.', From: CmpStart + 1);
254 CmpEnd = CmpEnd == StringRef::npos ? Name.size() : CmpEnd;
255 auto Cmp = [CmpStart, CmpEnd](const char *LHS, const char *RHS) {
256 return strncmp(s1: LHS + CmpStart, s2: RHS + CmpStart, n: CmpEnd - CmpStart) < 0;
257 };
258 LastLow = Low;
259 std::tie(args&: Low, args&: High) = std::equal_range(first: Low, last: High, val: Name.data(), comp: Cmp);
260 }
261 if (High - Low > 0)
262 LastLow = Low;
263
264 if (LastLow == NameTable.end())
265 return -1;
266 StringRef NameFound = *LastLow;
267 if (Name == NameFound ||
268 (Name.starts_with(Prefix: NameFound) && Name[NameFound.size()] == '.'))
269 return LastLow - NameTable.begin();
270 return -1;
271}
272
273ConstantInt *InstrProfCntrInstBase::getNumCounters() const {
274 if (InstrProfValueProfileInst::classof(I: this))
275 llvm_unreachable("InstrProfValueProfileInst does not have counters!");
276 return cast<ConstantInt>(Val: const_cast<Value *>(getArgOperand(i: 2)));
277}
278
279ConstantInt *InstrProfCntrInstBase::getIndex() const {
280 if (InstrProfValueProfileInst::classof(I: this))
281 llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
282 return cast<ConstantInt>(Val: const_cast<Value *>(getArgOperand(i: 3)));
283}
284
285Value *InstrProfIncrementInst::getStep() const {
286 if (InstrProfIncrementInstStep::classof(I: this)) {
287 return const_cast<Value *>(getArgOperand(i: 4));
288 }
289 const Module *M = getModule();
290 LLVMContext &Context = M->getContext();
291 return ConstantInt::get(Ty: Type::getInt64Ty(C&: Context), V: 1);
292}
293
294std::optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
295 unsigned NumOperands = arg_size();
296 Metadata *MD = nullptr;
297 auto *MAV = dyn_cast<MetadataAsValue>(Val: getArgOperand(i: NumOperands - 2));
298 if (MAV)
299 MD = MAV->getMetadata();
300 if (!MD || !isa<MDString>(Val: MD))
301 return std::nullopt;
302 return convertStrToRoundingMode(cast<MDString>(Val: MD)->getString());
303}
304
305std::optional<fp::ExceptionBehavior>
306ConstrainedFPIntrinsic::getExceptionBehavior() const {
307 unsigned NumOperands = arg_size();
308 Metadata *MD = nullptr;
309 auto *MAV = dyn_cast<MetadataAsValue>(Val: getArgOperand(i: NumOperands - 1));
310 if (MAV)
311 MD = MAV->getMetadata();
312 if (!MD || !isa<MDString>(Val: MD))
313 return std::nullopt;
314 return convertStrToExceptionBehavior(cast<MDString>(Val: MD)->getString());
315}
316
317bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
318 std::optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
319 if (Except) {
320 if (*Except != fp::ebIgnore)
321 return false;
322 }
323
324 std::optional<RoundingMode> Rounding = getRoundingMode();
325 if (Rounding) {
326 if (*Rounding != RoundingMode::NearestTiesToEven)
327 return false;
328 }
329
330 return true;
331}
332
333static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
334 Metadata *MD = cast<MetadataAsValue>(Val: Op)->getMetadata();
335 if (!MD || !isa<MDString>(Val: MD))
336 return FCmpInst::BAD_FCMP_PREDICATE;
337 return StringSwitch<FCmpInst::Predicate>(cast<MDString>(Val: MD)->getString())
338 .Case(S: "oeq", Value: FCmpInst::FCMP_OEQ)
339 .Case(S: "ogt", Value: FCmpInst::FCMP_OGT)
340 .Case(S: "oge", Value: FCmpInst::FCMP_OGE)
341 .Case(S: "olt", Value: FCmpInst::FCMP_OLT)
342 .Case(S: "ole", Value: FCmpInst::FCMP_OLE)
343 .Case(S: "one", Value: FCmpInst::FCMP_ONE)
344 .Case(S: "ord", Value: FCmpInst::FCMP_ORD)
345 .Case(S: "uno", Value: FCmpInst::FCMP_UNO)
346 .Case(S: "ueq", Value: FCmpInst::FCMP_UEQ)
347 .Case(S: "ugt", Value: FCmpInst::FCMP_UGT)
348 .Case(S: "uge", Value: FCmpInst::FCMP_UGE)
349 .Case(S: "ult", Value: FCmpInst::FCMP_ULT)
350 .Case(S: "ule", Value: FCmpInst::FCMP_ULE)
351 .Case(S: "une", Value: FCmpInst::FCMP_UNE)
352 .Default(Value: FCmpInst::BAD_FCMP_PREDICATE);
353}
354
355FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
356 return getFPPredicateFromMD(Op: getArgOperand(i: 2));
357}
358
359bool ConstrainedFPIntrinsic::isUnaryOp() const {
360 switch (getIntrinsicID()) {
361 default:
362 return false;
363#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC) \
364 case Intrinsic::INTRINSIC: \
365 return NARG == 1;
366#include "llvm/IR/ConstrainedOps.def"
367 }
368}
369
370bool ConstrainedFPIntrinsic::isTernaryOp() const {
371 switch (getIntrinsicID()) {
372 default:
373 return false;
374#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC) \
375 case Intrinsic::INTRINSIC: \
376 return NARG == 3;
377#include "llvm/IR/ConstrainedOps.def"
378 }
379}
380
381bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
382 switch (I->getIntrinsicID()) {
383#define INSTRUCTION(NAME, NARGS, ROUND_MODE, INTRINSIC) \
384 case Intrinsic::INTRINSIC:
385#include "llvm/IR/ConstrainedOps.def"
386 return true;
387 default:
388 return false;
389 }
390}
391
392ElementCount VPIntrinsic::getStaticVectorLength() const {
393 auto GetVectorLengthOfType = [](const Type *T) -> ElementCount {
394 const auto *VT = cast<VectorType>(Val: T);
395 auto ElemCount = VT->getElementCount();
396 return ElemCount;
397 };
398
399 Value *VPMask = getMaskParam();
400 if (!VPMask) {
401 assert((getIntrinsicID() == Intrinsic::vp_merge ||
402 getIntrinsicID() == Intrinsic::vp_select) &&
403 "Unexpected VP intrinsic without mask operand");
404 return GetVectorLengthOfType(getType());
405 }
406 return GetVectorLengthOfType(VPMask->getType());
407}
408
409Value *VPIntrinsic::getMaskParam() const {
410 if (auto MaskPos = getMaskParamPos(IntrinsicID: getIntrinsicID()))
411 return getArgOperand(i: *MaskPos);
412 return nullptr;
413}
414
415void VPIntrinsic::setMaskParam(Value *NewMask) {
416 auto MaskPos = getMaskParamPos(IntrinsicID: getIntrinsicID());
417 setArgOperand(i: *MaskPos, v: NewMask);
418}
419
420Value *VPIntrinsic::getVectorLengthParam() const {
421 if (auto EVLPos = getVectorLengthParamPos(IntrinsicID: getIntrinsicID()))
422 return getArgOperand(i: *EVLPos);
423 return nullptr;
424}
425
426void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
427 auto EVLPos = getVectorLengthParamPos(IntrinsicID: getIntrinsicID());
428 setArgOperand(i: *EVLPos, v: NewEVL);
429}
430
431std::optional<unsigned>
432VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
433 switch (IntrinsicID) {
434 default:
435 return std::nullopt;
436
437#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS) \
438 case Intrinsic::VPID: \
439 return MASKPOS;
440#include "llvm/IR/VPIntrinsics.def"
441 }
442}
443
444std::optional<unsigned>
445VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
446 switch (IntrinsicID) {
447 default:
448 return std::nullopt;
449
450#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS) \
451 case Intrinsic::VPID: \
452 return VLENPOS;
453#include "llvm/IR/VPIntrinsics.def"
454 }
455}
456
457/// \return the alignment of the pointer used by this load/store/gather or
458/// scatter.
459MaybeAlign VPIntrinsic::getPointerAlignment() const {
460 std::optional<unsigned> PtrParamOpt =
461 getMemoryPointerParamPos(getIntrinsicID());
462 assert(PtrParamOpt && "no pointer argument!");
463 return getParamAlign(ArgNo: *PtrParamOpt);
464}
465
466/// \return The pointer operand of this load,store, gather or scatter.
467Value *VPIntrinsic::getMemoryPointerParam() const {
468 if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
469 return getArgOperand(i: *PtrParamOpt);
470 return nullptr;
471}
472
473std::optional<unsigned>
474VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
475 switch (VPID) {
476 default:
477 break;
478#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
479#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
480#define END_REGISTER_VP_INTRINSIC(VPID) break;
481#include "llvm/IR/VPIntrinsics.def"
482 }
483 return std::nullopt;
484}
485
486/// \return The data (payload) operand of this store or scatter.
487Value *VPIntrinsic::getMemoryDataParam() const {
488 auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
489 if (!DataParamOpt)
490 return nullptr;
491 return getArgOperand(i: *DataParamOpt);
492}
493
494std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
495 switch (VPID) {
496 default:
497 break;
498#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
499#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
500#define END_REGISTER_VP_INTRINSIC(VPID) break;
501#include "llvm/IR/VPIntrinsics.def"
502 }
503 return std::nullopt;
504}
505
506constexpr bool isVPIntrinsic(Intrinsic::ID ID) {
507 switch (ID) {
508 default:
509 break;
510#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS) \
511 case Intrinsic::VPID: \
512 return true;
513#include "llvm/IR/VPIntrinsics.def"
514 }
515 return false;
516}
517
518bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
519 return ::isVPIntrinsic(ID);
520}
521
522// Equivalent non-predicated opcode
523constexpr static std::optional<unsigned>
524getFunctionalOpcodeForVP(Intrinsic::ID ID) {
525 switch (ID) {
526 default:
527 break;
528#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
529#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
530#define END_REGISTER_VP_INTRINSIC(VPID) break;
531#include "llvm/IR/VPIntrinsics.def"
532 }
533 return std::nullopt;
534}
535
536std::optional<unsigned>
537VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
538 return ::getFunctionalOpcodeForVP(ID);
539}
540
541// Equivalent non-predicated intrinsic ID
542constexpr static std::optional<Intrinsic::ID>
543getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
544 switch (ID) {
545 default:
546 break;
547#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
548#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) return Intrinsic::INTRIN;
549#define END_REGISTER_VP_INTRINSIC(VPID) break;
550#include "llvm/IR/VPIntrinsics.def"
551 }
552 return std::nullopt;
553}
554
555std::optional<Intrinsic::ID>
556VPIntrinsic::getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
557 return ::getFunctionalIntrinsicIDForVP(ID);
558}
559
560constexpr static bool doesVPHaveNoFunctionalEquivalent(Intrinsic::ID ID) {
561 switch (ID) {
562 default:
563 break;
564#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
565#define VP_PROPERTY_NO_FUNCTIONAL return true;
566#define END_REGISTER_VP_INTRINSIC(VPID) break;
567#include "llvm/IR/VPIntrinsics.def"
568 }
569 return false;
570}
571
572// All VP intrinsics should have an equivalent non-VP opcode or intrinsic
573// defined, or be marked that they don't have one.
574#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) \
575 static_assert(doesVPHaveNoFunctionalEquivalent(Intrinsic::VPID) || \
576 getFunctionalOpcodeForVP(Intrinsic::VPID) || \
577 getFunctionalIntrinsicIDForVP(Intrinsic::VPID));
578#include "llvm/IR/VPIntrinsics.def"
579
580// Equivalent non-predicated constrained intrinsic
581std::optional<Intrinsic::ID>
582VPIntrinsic::getConstrainedIntrinsicIDForVP(Intrinsic::ID ID) {
583 switch (ID) {
584 default:
585 break;
586#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
587#define VP_PROPERTY_CONSTRAINEDFP(HASRND, HASEXCEPT, CID) return Intrinsic::CID;
588#define END_REGISTER_VP_INTRINSIC(VPID) break;
589#include "llvm/IR/VPIntrinsics.def"
590 }
591 return std::nullopt;
592}
593
594Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
595 switch (IROPC) {
596 default:
597 break;
598
599#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
600#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
601#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
602#include "llvm/IR/VPIntrinsics.def"
603 }
604 return Intrinsic::not_intrinsic;
605}
606
607bool VPIntrinsic::canIgnoreVectorLengthParam() const {
608 using namespace PatternMatch;
609
610 ElementCount EC = getStaticVectorLength();
611
612 // No vlen param - no lanes masked-off by it.
613 auto *VLParam = getVectorLengthParam();
614 if (!VLParam)
615 return true;
616
617 // Note that the VP intrinsic causes undefined behavior if the Explicit Vector
618 // Length parameter is strictly greater-than the number of vector elements of
619 // the operation. This function returns true when this is detected statically
620 // in the IR.
621
622 // Check whether "W == vscale * EC.getKnownMinValue()"
623 if (EC.isScalable()) {
624 // Compare vscale patterns
625 uint64_t VScaleFactor;
626 if (match(V: VLParam, P: m_Mul(L: m_VScale(), R: m_ConstantInt(V&: VScaleFactor))))
627 return VScaleFactor >= EC.getKnownMinValue();
628 return (EC.getKnownMinValue() == 1) && match(V: VLParam, P: m_VScale());
629 }
630
631 // standard SIMD operation
632 const auto *VLConst = dyn_cast<ConstantInt>(Val: VLParam);
633 if (!VLConst)
634 return false;
635
636 uint64_t VLNum = VLConst->getZExtValue();
637 if (VLNum >= EC.getKnownMinValue())
638 return true;
639
640 return false;
641}
642
643Function *VPIntrinsic::getDeclarationForParams(Module *M, Intrinsic::ID VPID,
644 Type *ReturnType,
645 ArrayRef<Value *> Params) {
646 assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
647 Function *VPFunc;
648 switch (VPID) {
649 default: {
650 Type *OverloadTy = Params[0]->getType();
651 if (VPReductionIntrinsic::isVPReduction(ID: VPID))
652 OverloadTy =
653 Params[*VPReductionIntrinsic::getVectorParamPos(ID: VPID)]->getType();
654
655 VPFunc = Intrinsic::getDeclaration(M, id: VPID, Tys: OverloadTy);
656 break;
657 }
658 case Intrinsic::vp_trunc:
659 case Intrinsic::vp_sext:
660 case Intrinsic::vp_zext:
661 case Intrinsic::vp_fptoui:
662 case Intrinsic::vp_fptosi:
663 case Intrinsic::vp_uitofp:
664 case Intrinsic::vp_sitofp:
665 case Intrinsic::vp_fptrunc:
666 case Intrinsic::vp_fpext:
667 case Intrinsic::vp_ptrtoint:
668 case Intrinsic::vp_inttoptr:
669 case Intrinsic::vp_lrint:
670 case Intrinsic::vp_llrint:
671 VPFunc =
672 Intrinsic::getDeclaration(M, id: VPID, Tys: {ReturnType, Params[0]->getType()});
673 break;
674 case Intrinsic::vp_is_fpclass:
675 VPFunc = Intrinsic::getDeclaration(M, id: VPID, Tys: {Params[0]->getType()});
676 break;
677 case Intrinsic::vp_merge:
678 case Intrinsic::vp_select:
679 VPFunc = Intrinsic::getDeclaration(M, id: VPID, Tys: {Params[1]->getType()});
680 break;
681 case Intrinsic::vp_load:
682 VPFunc = Intrinsic::getDeclaration(
683 M, id: VPID, Tys: {ReturnType, Params[0]->getType()});
684 break;
685 case Intrinsic::experimental_vp_strided_load:
686 VPFunc = Intrinsic::getDeclaration(
687 M, id: VPID, Tys: {ReturnType, Params[0]->getType(), Params[1]->getType()});
688 break;
689 case Intrinsic::vp_gather:
690 VPFunc = Intrinsic::getDeclaration(
691 M, id: VPID, Tys: {ReturnType, Params[0]->getType()});
692 break;
693 case Intrinsic::vp_store:
694 VPFunc = Intrinsic::getDeclaration(
695 M, id: VPID, Tys: {Params[0]->getType(), Params[1]->getType()});
696 break;
697 case Intrinsic::experimental_vp_strided_store:
698 VPFunc = Intrinsic::getDeclaration(
699 M, id: VPID,
700 Tys: {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
701 break;
702 case Intrinsic::vp_scatter:
703 VPFunc = Intrinsic::getDeclaration(
704 M, id: VPID, Tys: {Params[0]->getType(), Params[1]->getType()});
705 break;
706 }
707 assert(VPFunc && "Could not declare VP intrinsic");
708 return VPFunc;
709}
710
711bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
712 switch (ID) {
713 default:
714 break;
715#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
716#define VP_PROPERTY_REDUCTION(STARTPOS, ...) return true;
717#define END_REGISTER_VP_INTRINSIC(VPID) break;
718#include "llvm/IR/VPIntrinsics.def"
719 }
720 return false;
721}
722
723bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
724 switch (ID) {
725 default:
726 break;
727#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
728#define VP_PROPERTY_CASTOP return true;
729#define END_REGISTER_VP_INTRINSIC(VPID) break;
730#include "llvm/IR/VPIntrinsics.def"
731 }
732 return false;
733}
734
735bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
736 switch (ID) {
737 default:
738 break;
739#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
740#define VP_PROPERTY_CMP(CCPOS, ...) return true;
741#define END_REGISTER_VP_INTRINSIC(VPID) break;
742#include "llvm/IR/VPIntrinsics.def"
743 }
744 return false;
745}
746
747bool VPBinOpIntrinsic::isVPBinOp(Intrinsic::ID ID) {
748 switch (ID) {
749 default:
750 break;
751#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
752#define VP_PROPERTY_BINARYOP return true;
753#define END_REGISTER_VP_INTRINSIC(VPID) break;
754#include "llvm/IR/VPIntrinsics.def"
755 }
756 return false;
757}
758
759static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
760 Metadata *MD = cast<MetadataAsValue>(Val: Op)->getMetadata();
761 if (!MD || !isa<MDString>(Val: MD))
762 return ICmpInst::BAD_ICMP_PREDICATE;
763 return StringSwitch<ICmpInst::Predicate>(cast<MDString>(Val: MD)->getString())
764 .Case(S: "eq", Value: ICmpInst::ICMP_EQ)
765 .Case(S: "ne", Value: ICmpInst::ICMP_NE)
766 .Case(S: "ugt", Value: ICmpInst::ICMP_UGT)
767 .Case(S: "uge", Value: ICmpInst::ICMP_UGE)
768 .Case(S: "ult", Value: ICmpInst::ICMP_ULT)
769 .Case(S: "ule", Value: ICmpInst::ICMP_ULE)
770 .Case(S: "sgt", Value: ICmpInst::ICMP_SGT)
771 .Case(S: "sge", Value: ICmpInst::ICMP_SGE)
772 .Case(S: "slt", Value: ICmpInst::ICMP_SLT)
773 .Case(S: "sle", Value: ICmpInst::ICMP_SLE)
774 .Default(Value: ICmpInst::BAD_ICMP_PREDICATE);
775}
776
777CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
778 bool IsFP = true;
779 std::optional<unsigned> CCArgIdx;
780 switch (getIntrinsicID()) {
781 default:
782 break;
783#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
784#define VP_PROPERTY_CMP(CCPOS, ISFP) \
785 CCArgIdx = CCPOS; \
786 IsFP = ISFP; \
787 break;
788#define END_REGISTER_VP_INTRINSIC(VPID) break;
789#include "llvm/IR/VPIntrinsics.def"
790 }
791 assert(CCArgIdx && "Unexpected vector-predicated comparison");
792 return IsFP ? getFPPredicateFromMD(Op: getArgOperand(i: *CCArgIdx))
793 : getIntPredicateFromMD(Op: getArgOperand(i: *CCArgIdx));
794}
795
796unsigned VPReductionIntrinsic::getVectorParamPos() const {
797 return *VPReductionIntrinsic::getVectorParamPos(ID: getIntrinsicID());
798}
799
800unsigned VPReductionIntrinsic::getStartParamPos() const {
801 return *VPReductionIntrinsic::getStartParamPos(ID: getIntrinsicID());
802}
803
804std::optional<unsigned>
805VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
806 switch (ID) {
807#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
808#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return VECTORPOS;
809#define END_REGISTER_VP_INTRINSIC(VPID) break;
810#include "llvm/IR/VPIntrinsics.def"
811 default:
812 break;
813 }
814 return std::nullopt;
815}
816
817std::optional<unsigned>
818VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
819 switch (ID) {
820#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
821#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return STARTPOS;
822#define END_REGISTER_VP_INTRINSIC(VPID) break;
823#include "llvm/IR/VPIntrinsics.def"
824 default:
825 break;
826 }
827 return std::nullopt;
828}
829
830Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
831 switch (getIntrinsicID()) {
832 case Intrinsic::uadd_with_overflow:
833 case Intrinsic::sadd_with_overflow:
834 case Intrinsic::uadd_sat:
835 case Intrinsic::sadd_sat:
836 return Instruction::Add;
837 case Intrinsic::usub_with_overflow:
838 case Intrinsic::ssub_with_overflow:
839 case Intrinsic::usub_sat:
840 case Intrinsic::ssub_sat:
841 return Instruction::Sub;
842 case Intrinsic::umul_with_overflow:
843 case Intrinsic::smul_with_overflow:
844 return Instruction::Mul;
845 default:
846 llvm_unreachable("Invalid intrinsic");
847 }
848}
849
850bool BinaryOpIntrinsic::isSigned() const {
851 switch (getIntrinsicID()) {
852 case Intrinsic::sadd_with_overflow:
853 case Intrinsic::ssub_with_overflow:
854 case Intrinsic::smul_with_overflow:
855 case Intrinsic::sadd_sat:
856 case Intrinsic::ssub_sat:
857 return true;
858 default:
859 return false;
860 }
861}
862
863unsigned BinaryOpIntrinsic::getNoWrapKind() const {
864 if (isSigned())
865 return OverflowingBinaryOperator::NoSignedWrap;
866 else
867 return OverflowingBinaryOperator::NoUnsignedWrap;
868}
869
870const Value *GCProjectionInst::getStatepoint() const {
871 const Value *Token = getArgOperand(i: 0);
872 if (isa<UndefValue>(Val: Token))
873 return Token;
874
875 // Treat none token as if it was undef here
876 if (isa<ConstantTokenNone>(Val: Token))
877 return UndefValue::get(T: Token->getType());
878
879 // This takes care both of relocates for call statepoints and relocates
880 // on normal path of invoke statepoint.
881 if (!isa<LandingPadInst>(Val: Token))
882 return cast<GCStatepointInst>(Val: Token);
883
884 // This relocate is on exceptional path of an invoke statepoint
885 const BasicBlock *InvokeBB =
886 cast<Instruction>(Val: Token)->getParent()->getUniquePredecessor();
887
888 assert(InvokeBB && "safepoints should have unique landingpads");
889 assert(InvokeBB->getTerminator() &&
890 "safepoint block should be well formed");
891
892 return cast<GCStatepointInst>(Val: InvokeBB->getTerminator());
893}
894
895Value *GCRelocateInst::getBasePtr() const {
896 auto Statepoint = getStatepoint();
897 if (isa<UndefValue>(Val: Statepoint))
898 return UndefValue::get(T: Statepoint->getType());
899
900 auto *GCInst = cast<GCStatepointInst>(Val: Statepoint);
901 if (auto Opt = GCInst->getOperandBundle(ID: LLVMContext::OB_gc_live))
902 return *(Opt->Inputs.begin() + getBasePtrIndex());
903 return *(GCInst->arg_begin() + getBasePtrIndex());
904}
905
906Value *GCRelocateInst::getDerivedPtr() const {
907 auto *Statepoint = getStatepoint();
908 if (isa<UndefValue>(Val: Statepoint))
909 return UndefValue::get(T: Statepoint->getType());
910
911 auto *GCInst = cast<GCStatepointInst>(Val: Statepoint);
912 if (auto Opt = GCInst->getOperandBundle(ID: LLVMContext::OB_gc_live))
913 return *(Opt->Inputs.begin() + getDerivedPtrIndex());
914 return *(GCInst->arg_begin() + getDerivedPtrIndex());
915}
916

source code of llvm/lib/IR/IntrinsicInst.cpp