//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder -*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugInfo.h"

using namespace llvm;

void MachineIRBuilder::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MBB = nullptr;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  State.DL = DebugLoc();
  State.II = MachineBasicBlock::iterator();
  State.Observer = nullptr;
}
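
// Illustrative setup (an assumed call site, not code from this file): a
// builder is bound to a function and then to a concrete insertion point
// before any build* call:
//
//   MachineIRBuilder Builder;
//   Builder.setMF(MF);                     // bind function-level state
//   Builder.setInsertPt(MBB, MBB.begin()); // choose where new MIs go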

//------------------------------------------------------------------------------
// Build instruction variants.
//------------------------------------------------------------------------------

MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
  MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  recordInsertion(MIB);
  return MIB;
}
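
// Typical pairing (illustrative sketch, not from this file):
// buildInstrNoInsert and insertInstr are used together when operands must be
// attached before the instruction is placed:
//
//   auto MIB = Builder.buildInstrNoInsert(TargetOpcode::G_ADD);
//   MIB.addDef(DstReg).addUse(LHSReg).addUse(RHSReg);
//   Builder.insertInstr(MIB); // inserted at the current insert point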

MachineInstrBuilder
MachineIRBuilder::buildDirectDbgValue(Register Reg, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}

MachineInstrBuilder
MachineIRBuilder::buildIndirectDbgValue(Register Reg, const MDNode *Variable,
                                        const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}

MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
                                                      const MDNode *Variable,
                                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
                                                         const MDNode *Variable,
                                                         const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstrNoInsert(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert $noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(Register());
  }

  MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
  return insertInstr(MIB);
}
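
// Resulting DBG_VALUE shapes, for reference (an illustrative MIR sketch;
// operand order is location, IsIndirect/offset, variable, expression):
//
//   DBG_VALUE %reg, $noreg, !var, !expr  ; direct register location
//   DBG_VALUE %reg, 0, !var, !expr       ; indirect (memory) location
//   DBG_VALUE %stack.0, 0, !var, !expr   ; frame-index location
//   DBG_VALUE 42, 0, !var, !expr         ; constant value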

MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
  assert(isa<DILabel>(Label) && "not a label");
  assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
         "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_LABEL);

  return MIB.addMetadata(Label);
}

MachineInstrBuilder MachineIRBuilder::buildDynStackAlloc(const DstOp &Res,
                                                         const SrcOp &Size,
                                                         Align Alignment) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "expected ptr dst type");
  auto MIB = buildInstr(TargetOpcode::G_DYN_STACKALLOC);
  Res.addDefToMIB(*getMRI(), MIB);
  Size.addSrcToMIB(MIB);
  MIB.addImm(Alignment.value());
  return MIB;
}
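
// The emitted generic MI looks like (illustrative MIR):
//
//   %dst:_(p0) = G_DYN_STACKALLOC %size(s64), 16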

MachineInstrBuilder MachineIRBuilder::buildFrameIndex(const DstOp &Res,
                                                      int Idx) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  auto MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addFrameIndex(Idx);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildGlobalValue(const DstOp &Res,
                                                       const GlobalValue *GV) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  assert(Res.getLLTTy(*getMRI()).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");

  auto MIB = buildInstr(TargetOpcode::G_GLOBAL_VALUE);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addGlobalAddress(GV);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildJumpTable(const LLT PtrTy,
                                                     unsigned JTI) {
  return buildInstr(TargetOpcode::G_JUMP_TABLE, {PtrTy}, {})
      .addJumpTableIndex(JTI);
}

void MachineIRBuilder::validateUnaryOp(const LLT Res, const LLT Op0) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0) && "type mismatch");
}

void MachineIRBuilder::validateBinaryOp(const LLT Res, const LLT Op0,
                                        const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0 && Res == Op1) && "type mismatch");
}

void MachineIRBuilder::validateShiftOp(const LLT Res, const LLT Op0,
                                       const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  // Note: Op1 (the shift amount) is deliberately not checked against Res;
  // shift amounts may use a different scalar type than the shifted value.
  assert((Res == Op0) && "type mismatch");
}

MachineInstrBuilder MachineIRBuilder::buildPtrAdd(const DstOp &Res,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1) {
  assert(Res.getLLTTy(*getMRI()).getScalarType().isPointer() &&
         Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
  assert(Op1.getLLTTy(*getMRI()).getScalarType().isScalar() &&
         "invalid offset type");

  return buildInstr(TargetOpcode::G_PTR_ADD, {Res}, {Op0, Op1});
}

Optional<MachineInstrBuilder>
MachineIRBuilder::materializePtrAdd(Register &Res, Register Op0,
                                    const LLT ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");

  if (Value == 0) {
    Res = Op0;
    return None;
  }

  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  auto Cst = buildConstant(ValueTy, Value);
  return buildPtrAdd(Res, Op0, Cst.getReg(0));
}
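
// Illustrative call site (an assumption, not from this file): the zero-offset
// case folds away and reuses Op0, so callers must check the Optional result:
//
//   Register NewPtr;
//   if (auto MIB = Builder.materializePtrAdd(NewPtr, BasePtr,
//                                            LLT::scalar(64), Offset)) {
//     // *MIB is the new G_PTR_ADD defining NewPtr.
//   } else {
//     // Offset was zero: NewPtr == BasePtr and nothing was built.
//   }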

MachineInstrBuilder MachineIRBuilder::buildMaskLowPtrBits(const DstOp &Res,
                                                          const SrcOp &Op0,
                                                          uint32_t NumBits) {
  LLT PtrTy = Res.getLLTTy(*getMRI());
  LLT MaskTy = LLT::scalar(PtrTy.getSizeInBits());
  Register MaskReg = getMRI()->createGenericVirtualRegister(MaskTy);
  buildConstant(MaskReg, maskTrailingZeros<uint64_t>(NumBits));
  return buildPtrMask(Res, Op0, MaskReg);
}
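
// Mask arithmetic, for reference: maskTrailingZeros<uint64_t>(NumBits) has
// the low NumBits clear and all higher bits set. With NumBits == 4 the mask
// is 0xFFFFFFFFFFFFFFF0, so the resulting G_PTRMASK aligns the pointer down
// to a 16-byte boundary.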

MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildBrIndirect(Register Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}

MachineInstrBuilder MachineIRBuilder::buildBrJT(Register TablePtr,
                                                unsigned JTI,
                                                Register IndexReg) {
  assert(getMRI()->getType(TablePtr).isPointer() &&
         "Table reg must be a pointer");
  return buildInstr(TargetOpcode::G_BRJT)
      .addUse(TablePtr)
      .addJumpTableIndex(JTI)
      .addUse(IndexReg);
}

MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::COPY, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const ConstantInt &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();
  assert(EltTy.getScalarSizeInBits() == Val.getBitWidth() &&
         "creating constant with the wrong size");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_CONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addCImm(&Val);
    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_CONSTANT);
  Const->setDebugLoc(DebugLoc());
  Res.addDefToMIB(*getMRI(), Const);
  Const.addCImm(&Val);
  return Const;
}
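
// Illustrative expansion (MIR sketch): requesting a <4 x s32> constant splats
// a scalar G_CONSTANT through G_BUILD_VECTOR:
//
//   %c:_(s32) = G_CONSTANT i32 7
//   %v:_(<4 x s32>) = G_BUILD_VECTOR %c(s32), %c(s32), %c(s32), %c(s32)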

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    int64_t Val) {
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
                               Res.getLLTTy(*getMRI()).getScalarSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const ConstantFP &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();

  assert(APFloat::getSizeInBits(Val.getValueAPF().getSemantics())
         == EltTy.getSizeInBits() &&
         "creating fconstant with the wrong size");

  assert(!Ty.isPointer() && "invalid operand type");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addFPImm(&Val);

    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
  Const->setDebugLoc(DebugLoc());
  Res.addDefToMIB(*getMRI(), Const);
  Const.addFPImm(&Val);
  return Const;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const APInt &Val) {
  ConstantInt *CI = ConstantInt::get(getMF().getFunction().getContext(), Val);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     double Val) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP =
      ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getScalarSizeInBits()));
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const APFloat &Val) {
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP = ConstantFP::get(Ctx, Val);
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildBrCond(Register Tst,
                                                  MachineBasicBlock &Dest) {
  assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
}

MachineInstrBuilder
MachineIRBuilder::buildLoad(const DstOp &Dst, const SrcOp &Addr,
                            MachinePointerInfo PtrInfo, Align Alignment,
                            MachineMemOperand::Flags MMOFlags,
                            const AAMDNodes &AAInfo) {
  MMOFlags |= MachineMemOperand::MOLoad;
  assert((MMOFlags & MachineMemOperand::MOStore) == 0);

  uint64_t Size = MemoryLocation::getSizeOrUnknown(
      TypeSize::Fixed(Dst.getLLTTy(*getMRI()).getSizeInBytes()));
  MachineMemOperand *MMO =
      getMF().getMachineMemOperand(PtrInfo, MMOFlags, Size, Alignment, AAInfo);
  return buildLoad(Dst, Addr, *MMO);
}

MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
                                                     const DstOp &Res,
                                                     const SrcOp &Addr,
                                                     MachineMemOperand &MMO) {
  assert(Res.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(Opcode);
  Res.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildLoadFromOffset(
    const DstOp &Dst, const SrcOp &BasePtr,
    MachineMemOperand &BaseMMO, int64_t Offset) {
  LLT LoadTy = Dst.getLLTTy(*getMRI());
  MachineMemOperand *OffsetMMO =
      getMF().getMachineMemOperand(&BaseMMO, Offset, LoadTy.getSizeInBytes());

  if (Offset == 0) // This may be a size or type changing load.
    return buildLoad(Dst, BasePtr, *OffsetMMO);

  LLT PtrTy = BasePtr.getLLTTy(*getMRI());
  LLT OffsetTy = LLT::scalar(PtrTy.getSizeInBits());
  auto ConstOffset = buildConstant(OffsetTy, Offset);
  auto Ptr = buildPtrAdd(PtrTy, BasePtr, ConstOffset);
  return buildLoad(Dst, Ptr, *OffsetMMO);
}
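
// Illustrative expansion (MIR sketch) for a nonzero offset:
//
//   %off:_(s64) = G_CONSTANT i64 8
//   %ptr:_(p0) = G_PTR_ADD %base, %off(s64)
//   %val:_(s32) = G_LOAD %ptr(p0) :: (load 4)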

MachineInstrBuilder MachineIRBuilder::buildStore(const SrcOp &Val,
                                                 const SrcOp &Addr,
                                                 MachineMemOperand &MMO) {
  assert(Val.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(TargetOpcode::G_STORE);
  Val.addSrcToMIB(MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildStore(const SrcOp &Val, const SrcOp &Addr,
                             MachinePointerInfo PtrInfo, Align Alignment,
                             MachineMemOperand::Flags MMOFlags,
                             const AAMDNodes &AAInfo) {
  MMOFlags |= MachineMemOperand::MOStore;
  assert((MMOFlags & MachineMemOperand::MOLoad) == 0);

  uint64_t Size = MemoryLocation::getSizeOrUnknown(
      TypeSize::Fixed(Val.getLLTTy(*getMRI()).getSizeInBytes()));
  MachineMemOperand *MMO =
      getMF().getMachineMemOperand(PtrInfo, MMOFlags, Size, Alignment, AAInfo);
  return buildStore(Val, Addr, *MMO);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
                                                  const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
}

unsigned MachineIRBuilder::getBoolExtOp(bool IsVec, bool IsFP) const {
  const auto *TLI = getMF().getSubtarget().getTargetLowering();
  switch (TLI->getBooleanContents(IsVec, IsFP)) {
  case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
    return TargetOpcode::G_SEXT;
  case TargetLoweringBase::ZeroOrOneBooleanContent:
    return TargetOpcode::G_ZEXT;
  default:
    return TargetOpcode::G_ANYEXT;
  }
}
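
// The mapping, for reference: targets whose booleans are all-ones (common for
// vector compare results) extend with G_SEXT, zero-or-one targets with
// G_ZEXT, and targets with undefined upper bits with G_ANYEXT.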

MachineInstrBuilder MachineIRBuilder::buildBoolExt(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   bool IsFP) {
  unsigned ExtOp =
      getBoolExtOp(getMRI()->getType(Op.getReg()).isVector(), IsFP);
  return buildInstr(ExtOp, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
                                                      const DstOp &Res,
                                                      const SrcOp &Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(Res.getLLTTy(*getMRI()).isScalar() ||
         Res.getLLTTy(*getMRI()).isVector());
  assert(Res.getLLTTy(*getMRI()).isScalar() ==
         Op.getLLTTy(*getMRI()).isScalar());

  unsigned Opcode = TargetOpcode::COPY;
  if (Res.getLLTTy(*getMRI()).getSizeInBits() >
      Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = ExtOpc;
  else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
           Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = TargetOpcode::G_TRUNC;
  else
    assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));

  return buildInstr(Opcode, Res, Op);
}
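
// Behavior sketch: the opcode is chosen purely from relative bit widths,
// e.g. s32 -> s64 emits the requested extend, s64 -> s32 emits G_TRUNC, and
// s32 -> s32 degenerates to COPY.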

MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
                                                         const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
                                                const SrcOp &Src) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());
  if (SrcTy == DstTy)
    return buildCopy(Dst, Src);

  unsigned Opcode;
  if (SrcTy.isPointer() && DstTy.isScalar())
    Opcode = TargetOpcode::G_PTRTOINT;
  else if (DstTy.isPointer() && SrcTy.isScalar())
    Opcode = TargetOpcode::G_INTTOPTR;
  else {
    assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
    Opcode = TargetOpcode::G_BITCAST;
  }

  return buildInstr(Opcode, Dst, Src);
}
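
// Opcode selection, for reference: pointer -> scalar uses G_PTRTOINT,
// scalar -> pointer uses G_INTTOPTR, same-size non-pointer casts use
// G_BITCAST, and identical types degenerate to COPY.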

MachineInstrBuilder MachineIRBuilder::buildExtract(const DstOp &Dst,
                                                   const SrcOp &Src,
                                                   uint64_t Index) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());

#ifndef NDEBUG
  assert(SrcTy.isValid() && "invalid operand type");
  assert(DstTy.isValid() && "invalid operand type");
  assert(Index + DstTy.getSizeInBits() <= SrcTy.getSizeInBits() &&
         "extracting off end of register");
#endif

  if (DstTy.getSizeInBits() == SrcTy.getSizeInBits()) {
    assert(Index == 0 && "extraction past the end of a register");
    return buildCast(Dst, Src);
  }

  auto Extract = buildInstr(TargetOpcode::G_EXTRACT);
  Dst.addDefToMIB(*getMRI(), Extract);
  Src.addSrcToMIB(Extract);
  Extract.addImm(Index);
  return Extract;
}

void MachineIRBuilder::buildSequence(Register Res, ArrayRef<Register> Ops,
                                     ArrayRef<uint64_t> Indices) {
#ifndef NDEBUG
  assert(Ops.size() == Indices.size() && "incompatible args");
  assert(!Ops.empty() && "invalid trivial sequence");
  assert(llvm::is_sorted(Indices) &&
         "sequence offsets must be in ascending order");

  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  for (auto Op : Ops)
    assert(getMRI()->getType(Op).isValid() && "invalid operand type");
#endif

  LLT ResTy = getMRI()->getType(Res);
  LLT OpTy = getMRI()->getType(Ops[0]);
  unsigned OpSize = OpTy.getSizeInBits();
  bool MaybeMerge = true;
  for (unsigned i = 0; i < Ops.size(); ++i) {
    if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
      MaybeMerge = false;
      break;
    }
  }

  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
    buildMerge(Res, Ops);
    return;
  }

  Register ResIn = getMRI()->createGenericVirtualRegister(ResTy);
  buildUndef(ResIn);

  for (unsigned i = 0; i < Ops.size(); ++i) {
    Register ResOut = i + 1 == Ops.size()
                          ? Res
                          : getMRI()->createGenericVirtualRegister(ResTy);
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
    ResIn = ResOut;
  }
}
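
// Illustrative behavior: for Ops = {%a:s32, %b:s32}, Indices = {0, 32} and an
// s64 result the pieces tile the register exactly, so a single
// G_MERGE_VALUES is emitted; otherwise the sequence is materialized as a
// chain of G_INSERTs into an initial G_IMPLICIT_DEF.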

MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
}

MachineInstrBuilder MachineIRBuilder::buildMerge(const DstOp &Res,
                                                 ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildMerge(const DstOp &Res,
                             std::initializer_list<SrcOp> Ops) {
  assert(Ops.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, Ops);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(LLT Res,
                                                   const SrcOp &Op) {
  unsigned NumReg =
      Op.getLLTTy(*getMRI()).getSizeInBits() / Res.getSizeInBits();
  SmallVector<Register, 8> TmpVec;
  for (unsigned I = 0; I != NumReg; ++I)
    TmpVec.push_back(getMRI()->createGenericVirtualRegister(Res));
  return buildUnmerge(TmpVec, Op);
}
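
// Illustrative use: unmerging an s64 into s32 pieces creates the destination
// registers automatically (result 0 holds the low bits):
//
//   auto Unmerge = Builder.buildUnmerge(LLT::scalar(32), Src64);
//   // MIR: %lo:_(s32), %hi:_(s32) = G_UNMERGE_VALUES %src(s64)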

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<Register> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
                                                       ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildSplatVector(const DstOp &Res,
                                                       const SrcOp &Src) {
  SmallVector<SrcOp, 8> TmpVec(Res.getLLTTy(*getMRI()).getNumElements(), Src);
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
                                        ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildInsert(const DstOp &Res,
                                                  const SrcOp &Src,
                                                  const SrcOp &Op,
                                                  unsigned Index) {
  assert(Index + Op.getLLTTy(*getMRI()).getSizeInBits() <=
             Res.getLLTTy(*getMRI()).getSizeInBits() &&
         "insertion past the end of a register");

  if (Res.getLLTTy(*getMRI()).getSizeInBits() ==
      Op.getLLTTy(*getMRI()).getSizeInBits()) {
    return buildCast(Res, Op);
  }

  return buildInstr(TargetOpcode::G_INSERT, Res, {Src, Op, uint64_t(Index)});
}

MachineInstrBuilder
MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                 ArrayRef<Register> ResultRegs,
                                 bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (unsigned ResultReg : ResultRegs)
    MIB.addDef(ResultReg);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     ArrayRef<DstOp> Results,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (DstOp Result : Results)
    Result.addDefToMIB(*getMRI(), MIB);
  MIB.addIntrinsicID(ID);
  return MIB;
}
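
// Illustrative call (an assumed use, not from this file); intrinsic arguments
// are appended after the intrinsic ID:
//
//   Builder.buildIntrinsic(Intrinsic::sqrt, {DstReg},
//                          /*HasSideEffects=*/false)
//       .addUse(SrcReg);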

MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
                                                 const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildFPTrunc(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
}

MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1,
                                                Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1}, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildSelect(const DstOp &Res,
                                                  const SrcOp &Tst,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1,
                                                  Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1}, Flags);
}

MachineInstrBuilder
MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
                                           const SrcOp &Elt, const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
}

MachineInstrBuilder
MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
                                            const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
}

MachineInstrBuilder MachineIRBuilder::buildAtomicCmpXchgWithSuccess(
    Register OldValRes, Register SuccessRes, Register Addr, Register CmpVal,
    Register NewVal, MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT SuccessResTy = getMRI()->getType(SuccessRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(SuccessResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
      .addDef(OldValRes)
      .addDef(SuccessRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}
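
// Resulting MI (illustrative MIR sketch):
//
//   %old:_(s32), %success:_(s1) = G_ATOMIC_CMPXCHG_WITH_SUCCESS %addr(p0),
//       %cmp(s32), %new(s32) :: (load store seq_cst 4)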

MachineInstrBuilder
MachineIRBuilder::buildAtomicCmpXchg(Register OldValRes, Register Addr,
                                     Register CmpVal, Register NewVal,
                                     MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(
    unsigned Opcode, const DstOp &OldValRes,
    const SrcOp &Addr, const SrcOp &Val,
    MachineMemOperand &MMO) {

#ifndef NDEBUG
  LLT OldValResTy = OldValRes.getLLTTy(*getMRI());
  LLT AddrTy = Addr.getLLTTy(*getMRI());
  LLT ValTy = Val.getLLTTy(*getMRI());
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(ValTy.isValid() && "invalid operand type");
  assert(OldValResTy == ValTy && "type mismatch");
  assert(MMO.isAtomic() && "not atomic mem operand");
#endif

  auto MIB = buildInstr(Opcode);
  OldValRes.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  Val.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXchg(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAdd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWSub(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAnd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWNand(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(Register OldValRes,
                                                       Register Addr,
                                                       Register Val,
                                                       MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXor(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMax(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMin(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmax(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmin(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFAdd(
  const DstOp &OldValRes, const SrcOp &Addr, const SrcOp &Val,
  MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FADD, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFSub(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FSUB, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildFence(unsigned Ordering, unsigned Scope) {
  return buildInstr(TargetOpcode::G_FENCE)
    .addImm(Ordering)
    .addImm(Scope);
}
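
// Illustrative call (an assumed use; the immediates encode an AtomicOrdering
// value and a SyncScope::ID):
//
//   Builder.buildFence(
//       static_cast<unsigned>(AtomicOrdering::SequentiallyConsistent),
//       SyncScope::System);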

MachineInstrBuilder
MachineIRBuilder::buildBlockAddress(Register Res, const BlockAddress *BA) {
#ifndef NDEBUG
  assert(getMRI()->getType(Res).isPointer() && "invalid res type");
#endif

  return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
}

void MachineIRBuilder::validateTruncExt(const LLT DstTy, const LLT SrcTy,
                                        bool IsExtend) {
#ifndef NDEBUG
  if (DstTy.isVector()) {
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");

  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}

void MachineIRBuilder::validateSelectOp(const LLT ResTy, const LLT TstTy,
                                        const LLT Op0Ty, const LLT Op1Ty) {
#ifndef NDEBUG
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
  if (ResTy.isScalar() || ResTy.isPointer())
    assert(TstTy.isScalar() && "type mismatch");
  else
    assert((TstTy.isScalar() ||
            (TstTy.isVector() &&
             TstTy.getNumElements() == Op0Ty.getNumElements())) &&
           "type mismatch");
#endif
}

MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opc,
                                                 ArrayRef<DstOp> DstOps,
                                                 ArrayRef<SrcOp> SrcOps,
                                                 Optional<unsigned> Flags) {
  switch (Opc) {
  default:
    break;
  case TargetOpcode::G_SELECT: {
    assert(DstOps.size() == 1 && "Invalid select");
    assert(SrcOps.size() == 3 && "Invalid select");
    validateSelectOp(
        DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
        SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_FNEG:
  case TargetOpcode::G_ABS:
    // All these are unary ops.
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateUnaryOp(DstOps[0].getLLTTy(*getMRI()),
                    SrcOps[0].getLLTTy(*getMRI()));
    break;
  case TargetOpcode::G_ADD:
  case TargetOpcode::G_AND:
  case TargetOpcode::G_MUL:
  case TargetOpcode::G_OR:
  case TargetOpcode::G_SUB:
  case TargetOpcode::G_XOR:
  case TargetOpcode::G_UDIV:
  case TargetOpcode::G_SDIV:
  case TargetOpcode::G_UREM:
  case TargetOpcode::G_SREM:
  case TargetOpcode::G_SMIN:
  case TargetOpcode::G_SMAX:
  case TargetOpcode::G_UMIN:
  case TargetOpcode::G_UMAX:
  case TargetOpcode::G_UADDSAT:
  case TargetOpcode::G_SADDSAT:
  case TargetOpcode::G_USUBSAT:
  case TargetOpcode::G_SSUBSAT: {
    // All these are binary ops.
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()),
                     SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SHL:
  case TargetOpcode::G_ASHR:
  case TargetOpcode::G_LSHR:
  case TargetOpcode::G_USHLSAT:
  case TargetOpcode::G_SSHLSAT: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateShiftOp(DstOps[0].getLLTTy(*getMRI()),
                    SrcOps[0].getLLTTy(*getMRI()),
                    SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SEXT:
  case TargetOpcode::G_ZEXT:
  case TargetOpcode::G_ANYEXT:
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), true);
    break;
  case TargetOpcode::G_TRUNC:
  case TargetOpcode::G_FPTRUNC: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), false);
    break;
  }
  case TargetOpcode::G_BITCAST: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    assert(DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
           SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() && "invalid bitcast");
    break;
  }
  case TargetOpcode::COPY:
    assert(DstOps.size() == 1 && "Invalid Dst");
    // If the caller wants to add a subreg source it has to be done separately
    // so we may not have any SrcOps at this point yet.
    break;
  case TargetOpcode::G_FCMP:
  case TargetOpcode::G_ICMP: {
    assert(DstOps.size() == 1 && "Invalid Dst Operands");
    assert(SrcOps.size() == 3 && "Invalid Src Operands");
    // For F/ICMP, the first src operand is the predicate, followed by
    // the two comparands.
    assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
           "Expecting predicate");
    assert([&]() -> bool {
      CmpInst::Predicate Pred = SrcOps[0].getPredicate();
      return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
                                         : CmpInst::isFPPredicate(Pred);
    }() && "Invalid predicate");
    assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert([&]() -> bool {
      LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
      LLT DstTy = DstOps[0].getLLTTy(*getMRI());
      if (Op0Ty.isScalar() || Op0Ty.isPointer())
        return DstTy.isScalar();
      else
        return DstTy.isVector() &&
               DstTy.getNumElements() == Op0Ty.getNumElements();
    }() && "Type Mismatch");
    break;
  }
  case TargetOpcode::G_UNMERGE_VALUES: {
    assert(!DstOps.empty() && "Invalid trivial sequence");
    assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
    assert(std::all_of(DstOps.begin(), DstOps.end(),
                       [&, this](const DstOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                DstOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in output list");
    assert(DstOps.size() * DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "output operands do not cover input register");
    break;
  }
  case TargetOpcode::G_MERGE_VALUES: {
    assert(!SrcOps.empty() && "invalid trivial sequence");
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    if (SrcOps.size() == 1)
      return buildCast(DstOps[0], SrcOps[0]);
    if (DstOps[0].getLLTTy(*getMRI()).isVector()) {
      if (SrcOps[0].getLLTTy(*getMRI()).isVector())
        return buildInstr(TargetOpcode::G_CONCAT_VECTORS, DstOps, SrcOps);
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    }
    break;
  }
  case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid Dst size");
    assert(SrcOps.size() == 2 && "Invalid Src size");
    assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
            DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
           "Invalid operand type");
    assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
    assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
               DstOps[0].getLLTTy(*getMRI()) &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_INSERT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid dst size");
    assert(SrcOps.size() == 3 && "Invalid src size");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
               SrcOps[1].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
    assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
               SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input scalars do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    if (SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
        DstOps[0].getLLTTy(*getMRI()).getElementType().getSizeInBits())
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    break;
  }
  case TargetOpcode::G_CONCAT_VECTORS: {
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return (Op.getLLTTy(*getMRI()).isVector() &&
                                 Op.getLLTTy(*getMRI()) ==
                                     SrcOps[0].getLLTTy(*getMRI()));
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input vectors do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_UADDE: {
    assert(DstOps.size() == 2 && "Invalid no of dst operands");
    assert(SrcOps.size() == 3 && "Invalid no of src operands");
    assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
           (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
           "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "type mismatch");
    break;
  }
  }

  auto MIB = buildInstr(Opc);
  for (const DstOp &Op : DstOps)
    Op.addDefToMIB(*getMRI(), MIB);
  for (const SrcOp &Op : SrcOps)
    Op.addSrcToMIB(MIB);
  if (Flags)
    MIB->setFlags(*Flags);
  return MIB;
}
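
// Illustrative call through this generic entry point (equivalent to the
// convenience wrapper Builder.buildAdd(Dst, LHS, RHS)):
//
//   Builder.buildInstr(TargetOpcode::G_ADD, {Dst}, {LHS, RHS});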