//===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements methods that make it really easy to deal with intrinsic
// functions.
//
// All intrinsic function calls are instances of the call instruction, so these
// are all subclasses of the CallInst class.  Note that none of these classes
// has state or virtual methods, which is an important part of this gross/neat
// hack working.
//
// In some cases, arguments to intrinsics need to be generic and are defined as
// a pointer to an empty struct ({ }*).  To access the real item of interest,
// the cast instruction needs to be stripped away.
//
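// A typical use of these wrappers is to dyn_cast a visited instruction to the
// relevant subclass instead of inspecting the callee by name; an illustrative
// sketch only:
//
//   if (auto *DVI = dyn_cast<DbgValueInst>(&I))
//     for (Value *V : DVI->location_ops())
//       ...
//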
//===----------------------------------------------------------------------===//

#include "llvm/IR/IntrinsicInst.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/IR/Statepoint.h"

using namespace llvm;

bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
  switch (IID) {
  case Intrinsic::objc_autorelease:
  case Intrinsic::objc_autoreleasePoolPop:
  case Intrinsic::objc_autoreleasePoolPush:
  case Intrinsic::objc_autoreleaseReturnValue:
  case Intrinsic::objc_copyWeak:
  case Intrinsic::objc_destroyWeak:
  case Intrinsic::objc_initWeak:
  case Intrinsic::objc_loadWeak:
  case Intrinsic::objc_loadWeakRetained:
  case Intrinsic::objc_moveWeak:
  case Intrinsic::objc_release:
  case Intrinsic::objc_retain:
  case Intrinsic::objc_retainAutorelease:
  case Intrinsic::objc_retainAutoreleaseReturnValue:
  case Intrinsic::objc_retainAutoreleasedReturnValue:
  case Intrinsic::objc_retainBlock:
  case Intrinsic::objc_storeStrong:
  case Intrinsic::objc_storeWeak:
  case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
  case Intrinsic::objc_retainedObject:
  case Intrinsic::objc_unretainedObject:
  case Intrinsic::objc_unretainedPointer:
  case Intrinsic::objc_retain_autorelease:
  case Intrinsic::objc_sync_enter:
  case Intrinsic::objc_sync_exit:
    return true;
  default:
    return false;
  }
}

//===----------------------------------------------------------------------===//
/// DbgVariableIntrinsic - This is the common base class for debug info
/// intrinsics for variables.
///
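/// The first (location) operand of one of these intrinsics holds its value
/// location(s) in one of three forms (illustrative sketch, not an exhaustive
/// specification):
///   llvm.dbg.value(metadata i32 %x, ...)                      ; ValueAsMetadata
///   llvm.dbg.value(metadata !DIArgList(i32 %x, i32 %y), ...)  ; DIArgList
///   llvm.dbg.value(metadata !{}, ...)                         ; empty (killed)
/// The accessors below abstract over these three representations.
///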

iterator_range<DbgVariableIntrinsic::location_op_iterator>
DbgVariableIntrinsic::location_ops() const {
  auto *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");

  // If operand is ValueAsMetadata, return a range over just that operand.
  if (auto *VAM = dyn_cast<ValueAsMetadata>(MD)) {
    return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
  }
  // If operand is DIArgList, return a range over its args.
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return {location_op_iterator(AL->args_begin()),
            location_op_iterator(AL->args_end())};
  // Operand must be an empty metadata tuple, so return empty iterator.
  return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
          location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
}

Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
  auto *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return AL->getArgs()[OpIdx]->getValue();
  if (isa<MDNode>(MD))
    return nullptr;
  assert(
      isa<ValueAsMetadata>(MD) &&
      "Attempted to get location operand from DbgVariableIntrinsic with none.");
  auto *V = cast<ValueAsMetadata>(MD);
  assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
                       "single location operand.");
  return V->getValue();
}

static ValueAsMetadata *getAsMetadata(Value *V) {
  return isa<MetadataAsValue>(V) ? dyn_cast<ValueAsMetadata>(
                                       cast<MetadataAsValue>(V)->getMetadata())
                                 : ValueAsMetadata::get(V);
}

void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
                                                     Value *NewValue) {
  assert(NewValue && "Values must be non-null");
  auto Locations = location_ops();
  auto OldIt = find(Locations, OldValue);
  assert(OldIt != Locations.end() && "OldValue must be a current location");
  if (!hasArgList()) {
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (auto *VMD : Locations)
    MDs.push_back(VMD == *OldIt ? NewOperand : getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
                                                     Value *NewValue) {
  assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
  if (!hasArgList()) {
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
    MDs.push_back(Idx == OpIdx ? NewOperand
                               : getAsMetadata(getVariableLocationOp(Idx)));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
                                                  DIExpression *NewExpr) {
  assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
                                    NewValues.size()) &&
         "NewExpr for debug variable intrinsic does not reference every "
         "location operand.");
  assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
  setArgOperand(2, MetadataAsValue::get(getContext(), NewExpr));
  SmallVector<ValueAsMetadata *, 4> MDs;
  for (auto *VMD : location_ops())
    MDs.push_back(getAsMetadata(VMD));
  for (auto *VMD : NewValues)
    MDs.push_back(getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

Optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
  if (auto Fragment = getExpression()->getFragmentInfo())
    return Fragment->SizeInBits;
  return getVariable()->getSizeInBits();
}

int llvm::Intrinsic::lookupLLVMIntrinsicByName(ArrayRef<const char *> NameTable,
                                               StringRef Name) {
  assert(Name.startswith("llvm."));

  // Do successive binary searches of the dotted name components. For
  // "llvm.experimental.gc.statepoint.p1i8.p1i32", we will find the range of
  // intrinsics starting with "llvm.experimental", then "llvm.experimental.gc",
  // then "llvm.experimental.gc.statepoint", and then we will stop as the range
  // is size 1. During the search, we can skip the prefix that we already know
  // is identical. By using strncmp we consider names with differing suffixes
  // to be part of the equal range.
  size_t CmpEnd = 4; // Skip the "llvm" component.
  const char *const *Low = NameTable.begin();
  const char *const *High = NameTable.end();
  const char *const *LastLow = Low;
  while (CmpEnd < Name.size() && High - Low > 0) {
    size_t CmpStart = CmpEnd;
    CmpEnd = Name.find('.', CmpStart + 1);
    CmpEnd = CmpEnd == StringRef::npos ? Name.size() : CmpEnd;
    auto Cmp = [CmpStart, CmpEnd](const char *LHS, const char *RHS) {
      return strncmp(LHS + CmpStart, RHS + CmpStart, CmpEnd - CmpStart) < 0;
    };
    LastLow = Low;
    std::tie(Low, High) = std::equal_range(Low, High, Name.data(), Cmp);
  }
  if (High - Low > 0)
    LastLow = Low;

  if (LastLow == NameTable.end())
    return -1;
  StringRef NameFound = *LastLow;
  if (Name == NameFound ||
      (Name.startswith(NameFound) && Name[NameFound.size()] == '.'))
    return LastLow - NameTable.begin();
  return -1;
}

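// The non-value-profiling instrumentation intrinsics share a common operand
// layout, roughly as the LangRef describes llvm.instrprof.increment:
//   declare void @llvm.instrprof.increment(i8* <name>, i64 <hash>,
//                                          i32 <num-counters>, i32 <index>)
// so the accessors below read the num-counters and index operands (2 and 3).
// llvm.instrprof.value.profile uses a different layout and is rejected here.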
ConstantInt *InstrProfInstBase::getNumCounters() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("InstrProfValueProfileInst does not have counters!");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(2)));
}

ConstantInt *InstrProfInstBase::getIndex() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(3)));
}

Value *InstrProfIncrementInst::getStep() const {
  if (InstrProfIncrementInstStep::classof(this)) {
    return const_cast<Value *>(getArgOperand(4));
  }
  const Module *M = getModule();
  LLVMContext &Context = M->getContext();
  return ConstantInt::get(Type::getInt64Ty(Context), 1);
}

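// Constrained FP intrinsics encode their FP environment as trailing metadata
// string operands, e.g. (illustrative call):
//   call double @llvm.experimental.constrained.fadd.f64(
//            double %a, double %b,
//            metadata !"round.dynamic", metadata !"fpexcept.strict")
// For intrinsics that take them, the rounding mode is the second-to-last
// operand and the exception behavior is the last one; the accessors below
// decode those strings and return None when the operand is absent or is not
// a metadata string.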
Optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 2));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return None;
  return convertStrToRoundingMode(cast<MDString>(MD)->getString());
}

Optional<fp::ExceptionBehavior>
ConstrainedFPIntrinsic::getExceptionBehavior() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 1));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return None;
  return convertStrToExceptionBehavior(cast<MDString>(MD)->getString());
}

bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
  Optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
  if (Except) {
    if (Except.value() != fp::ebIgnore)
      return false;
  }

  Optional<RoundingMode> Rounding = getRoundingMode();
  if (Rounding) {
    if (Rounding.value() != RoundingMode::NearestTiesToEven)
      return false;
  }

  return true;
}

static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return FCmpInst::BAD_FCMP_PREDICATE;
  return StringSwitch<FCmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("oeq", FCmpInst::FCMP_OEQ)
      .Case("ogt", FCmpInst::FCMP_OGT)
      .Case("oge", FCmpInst::FCMP_OGE)
      .Case("olt", FCmpInst::FCMP_OLT)
      .Case("ole", FCmpInst::FCMP_OLE)
      .Case("one", FCmpInst::FCMP_ONE)
      .Case("ord", FCmpInst::FCMP_ORD)
      .Case("uno", FCmpInst::FCMP_UNO)
      .Case("ueq", FCmpInst::FCMP_UEQ)
      .Case("ugt", FCmpInst::FCMP_UGT)
      .Case("uge", FCmpInst::FCMP_UGE)
      .Case("ult", FCmpInst::FCMP_ULT)
      .Case("ule", FCmpInst::FCMP_ULE)
      .Case("une", FCmpInst::FCMP_UNE)
      .Default(FCmpInst::BAD_FCMP_PREDICATE);
}

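// In a constrained comparison the predicate is also passed as a metadata
// string, e.g. (illustrative call):
//   call i1 @llvm.experimental.constrained.fcmp.f64(
//            double %a, double %b, metadata !"oeq", metadata !"fpexcept.strict")
// which is why the predicate is decoded from operand 2 below.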
FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
  return getFPPredicateFromMD(getArgOperand(2));
}

bool ConstrainedFPIntrinsic::isUnaryOp() const {
  switch (getIntrinsicID()) {
  default:
    return false;
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:                                                   \
    return NARG == 1;
#include "llvm/IR/ConstrainedOps.def"
  }
}

bool ConstrainedFPIntrinsic::isTernaryOp() const {
  switch (getIntrinsicID()) {
  default:
    return false;
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:                                                   \
    return NARG == 3;
#include "llvm/IR/ConstrainedOps.def"
  }
}

bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
  switch (I->getIntrinsicID()) {
#define INSTRUCTION(NAME, NARGS, ROUND_MODE, INTRINSIC)                        \
  case Intrinsic::INTRINSIC:
#include "llvm/IR/ConstrainedOps.def"
    return true;
  default:
    return false;
  }
}

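//===----------------------------------------------------------------------===//
/// VPIntrinsic - Most vector-predicated (VP) intrinsics carry a mask and an
/// explicit vector length (EVL) as trailing operands, e.g. (illustrative call):
///   call <8 x i32> @llvm.vp.add.v8i32(<8 x i32> %a, <8 x i32> %b,
///                                     <8 x i1> %mask, i32 %evl)
/// The operand positions used below are generated from VPIntrinsics.def.
///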
ElementCount VPIntrinsic::getStaticVectorLength() const {
  auto GetVectorLengthOfType = [](const Type *T) -> ElementCount {
    const auto *VT = cast<VectorType>(T);
    auto ElemCount = VT->getElementCount();
    return ElemCount;
  };

  Value *VPMask = getMaskParam();
  if (!VPMask) {
    assert((getIntrinsicID() == Intrinsic::vp_merge ||
            getIntrinsicID() == Intrinsic::vp_select) &&
           "Unexpected VP intrinsic without mask operand");
    return GetVectorLengthOfType(getType());
  }
  return GetVectorLengthOfType(VPMask->getType());
}

Value *VPIntrinsic::getMaskParam() const {
  if (auto MaskPos = getMaskParamPos(getIntrinsicID()))
    return getArgOperand(*MaskPos);
  return nullptr;
}

void VPIntrinsic::setMaskParam(Value *NewMask) {
  auto MaskPos = getMaskParamPos(getIntrinsicID());
  setArgOperand(*MaskPos, NewMask);
}

Value *VPIntrinsic::getVectorLengthParam() const {
  if (auto EVLPos = getVectorLengthParamPos(getIntrinsicID()))
    return getArgOperand(*EVLPos);
  return nullptr;
}

void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
  auto EVLPos = getVectorLengthParamPos(getIntrinsicID());
  setArgOperand(*EVLPos, NewEVL);
}

Optional<unsigned> VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return None;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return MASKPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}

Optional<unsigned>
VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return None;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return VLENPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}

/// \return The alignment of the pointer used by this load, store, gather, or
/// scatter.
MaybeAlign VPIntrinsic::getPointerAlignment() const {
  Optional<unsigned> PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID());
  assert(PtrParamOpt && "no pointer argument!");
  return getParamAlign(PtrParamOpt.value());
}

/// \return The pointer operand of this load, store, gather, or scatter.
Value *VPIntrinsic::getMemoryPointerParam() const {
  if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
    return getArgOperand(PtrParamOpt.value());
  return nullptr;
}

Optional<unsigned> VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return None;
}

/// \return The data (payload) operand of this store or scatter.
Value *VPIntrinsic::getMemoryDataParam() const {
  auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
  if (!DataParamOpt)
    return nullptr;
  return getArgOperand(DataParamOpt.value());
}

Optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return None;
}

bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return true;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

// Equivalent non-predicated opcode
Optional<unsigned> VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return None;
}

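// Inverse of getFunctionalOpcodeForVP: return the VP intrinsic that predicates
// the given IR opcode, e.g. Instruction::Add maps to Intrinsic::vp_add.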
Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
  switch (IROPC) {
  default:
    break;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}

bool VPIntrinsic::canIgnoreVectorLengthParam() const {
  using namespace PatternMatch;

  ElementCount EC = getStaticVectorLength();

  // No vlen param - no lanes masked-off by it.
  auto *VLParam = getVectorLengthParam();
  if (!VLParam)
    return true;

  // Note that a VP intrinsic causes undefined behavior if the explicit vector
  // length parameter is strictly greater than the number of vector elements of
  // the operation; given that, the EVL can be ignored whenever it is known to
  // cover all lanes of the operation. This function returns true when that can
  // be established statically in the IR.
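  // For example, for a <vscale x 4 x i32> operation the EVL covers every lane
  // when it is a multiple of vscale with a constant factor of at least 4; for
  // a fixed <4 x i32> operation any constant EVL >= 4 does.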

  // Check whether "VLParam == vscale * EC.getKnownMinValue()".
  if (EC.isScalable()) {
    // Retrieve the DataLayout to recognize the vscale patterns below.
    const auto *ParMod = this->getModule();
    if (!ParMod)
      return false;
    const auto &DL = ParMod->getDataLayout();

    // Compare vscale patterns
    uint64_t VScaleFactor;
    if (match(VLParam, m_c_Mul(m_ConstantInt(VScaleFactor), m_VScale(DL))))
      return VScaleFactor >= EC.getKnownMinValue();
    return (EC.getKnownMinValue() == 1) && match(VLParam, m_VScale(DL));
  }

  // Standard (fixed-width) SIMD operation: a constant EVL that is at least the
  // vector length covers all lanes.
  const auto *VLConst = dyn_cast<ConstantInt>(VLParam);
  if (!VLConst)
    return false;

  uint64_t VLNum = VLConst->getZExtValue();
  if (VLNum >= EC.getKnownMinValue())
    return true;

  return false;
}

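/// Declare (or find) the requested VP intrinsic in module \p M, deriving the
/// overloaded types from \p ReturnType and \p Params. A hypothetical usage
/// sketch (M, ReturnTy, Args and Builder are assumed to exist in the caller):
///
///   Function *VPFn = VPIntrinsic::getDeclarationForParams(
///       M, Intrinsic::vp_add, ReturnTy, Args);
///   Builder.CreateCall(VPFn, Args);
///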
Function *VPIntrinsic::getDeclarationForParams(Module *M, Intrinsic::ID VPID,
                                               Type *ReturnType,
                                               ArrayRef<Value *> Params) {
  assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
  Function *VPFunc;
  switch (VPID) {
  default: {
    Type *OverloadTy = Params[0]->getType();
    if (VPReductionIntrinsic::isVPReduction(VPID))
      OverloadTy =
          Params[*VPReductionIntrinsic::getVectorParamPos(VPID)]->getType();

    VPFunc = Intrinsic::getDeclaration(M, VPID, OverloadTy);
    break;
  }
  case Intrinsic::vp_trunc:
  case Intrinsic::vp_sext:
  case Intrinsic::vp_zext:
  case Intrinsic::vp_fptoui:
  case Intrinsic::vp_fptosi:
  case Intrinsic::vp_uitofp:
  case Intrinsic::vp_sitofp:
  case Intrinsic::vp_fptrunc:
  case Intrinsic::vp_fpext:
  case Intrinsic::vp_ptrtoint:
  case Intrinsic::vp_inttoptr:
    VPFunc =
        Intrinsic::getDeclaration(M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_merge:
  case Intrinsic::vp_select:
    VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[1]->getType()});
    break;
  case Intrinsic::vp_load:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_load:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::vp_gather:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID,
        {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
    break;
  case Intrinsic::vp_scatter:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  }
  assert(VPFunc && "Could not declare VP intrinsic");
  return VPFunc;
}

bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CASTOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return ICmpInst::BAD_ICMP_PREDICATE;
  return StringSwitch<ICmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("eq", ICmpInst::ICMP_EQ)
      .Case("ne", ICmpInst::ICMP_NE)
      .Case("ugt", ICmpInst::ICMP_UGT)
      .Case("uge", ICmpInst::ICMP_UGE)
      .Case("ult", ICmpInst::ICMP_ULT)
      .Case("ule", ICmpInst::ICMP_ULE)
      .Case("sgt", ICmpInst::ICMP_SGT)
      .Case("sge", ICmpInst::ICMP_SGE)
      .Case("slt", ICmpInst::ICMP_SLT)
      .Case("sle", ICmpInst::ICMP_SLE)
      .Default(ICmpInst::BAD_ICMP_PREDICATE);
}

CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
  bool IsFP = true;
  Optional<unsigned> CCArgIdx;
  switch (getIntrinsicID()) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ISFP)                                           \
  CCArgIdx = CCPOS;                                                            \
  IsFP = ISFP;                                                                 \
  break;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  assert(CCArgIdx && "Unexpected vector-predicated comparison");
  return IsFP ? getFPPredicateFromMD(getArgOperand(*CCArgIdx))
              : getIntPredicateFromMD(getArgOperand(*CCArgIdx));
}

unsigned VPReductionIntrinsic::getVectorParamPos() const {
  return *VPReductionIntrinsic::getVectorParamPos(getIntrinsicID());
}

unsigned VPReductionIntrinsic::getStartParamPos() const {
  return *VPReductionIntrinsic::getStartParamPos(getIntrinsicID());
}

Optional<unsigned> VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return VECTORPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return None;
}

Optional<unsigned> VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return STARTPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return None;
}

Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
  switch (getIntrinsicID()) {
  case Intrinsic::uadd_with_overflow:
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::uadd_sat:
  case Intrinsic::sadd_sat:
    return Instruction::Add;
  case Intrinsic::usub_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::usub_sat:
  case Intrinsic::ssub_sat:
    return Instruction::Sub;
  case Intrinsic::umul_with_overflow:
  case Intrinsic::smul_with_overflow:
    return Instruction::Mul;
  default:
    llvm_unreachable("Invalid intrinsic");
  }
}

bool BinaryOpIntrinsic::isSigned() const {
  switch (getIntrinsicID()) {
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::smul_with_overflow:
  case Intrinsic::sadd_sat:
  case Intrinsic::ssub_sat:
    return true;
  default:
    return false;
  }
}

unsigned BinaryOpIntrinsic::getNoWrapKind() const {
  if (isSigned())
    return OverflowingBinaryOperator::NoSignedWrap;
  else
    return OverflowingBinaryOperator::NoUnsignedWrap;
}

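// gc.relocate and gc.result project values out of a statepoint and take the
// statepoint token as their first operand, e.g. (illustrative relocate on the
// normal path of a call statepoint):
//   %rel = call i8 addrspace(1)* @llvm.experimental.gc.relocate.p1i8(
//              token %statepoint_token, i32 0, i32 0)
// On the exceptional path of an invoke, the token is instead produced by the
// landing pad, so the statepoint is recovered from the invoking terminator.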
const Value *GCProjectionInst::getStatepoint() const {
  const Value *Token = getArgOperand(0);
  if (isa<UndefValue>(Token))
    return Token;

  // This handles both relocates for call statepoints and relocates on the
  // normal path of an invoke statepoint.
  if (!isa<LandingPadInst>(Token))
    return cast<GCStatepointInst>(Token);

  // This relocate is on the exceptional path of an invoke statepoint.
  const BasicBlock *InvokeBB =
    cast<Instruction>(Token)->getParent()->getUniquePredecessor();

  assert(InvokeBB && "safepoints should have unique landingpads");
  assert(InvokeBB->getTerminator() &&
         "safepoint block should be well formed");

  return cast<GCStatepointInst>(InvokeBB->getTerminator());
}

Value *GCRelocateInst::getBasePtr() const {
  auto Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getBasePtrIndex());
  return *(GCInst->arg_begin() + getBasePtrIndex());
}

Value *GCRelocateInst::getDerivedPtr() const {
  auto *Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getDerivedPtrIndex());
  return *(GCInst->arg_begin() + getDerivedPtrIndex());
}