//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugInfo.h"

using namespace llvm;

void MachineIRBuilder::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MBB = nullptr;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  State.DL = DebugLoc();
  State.II = MachineBasicBlock::iterator();
  State.Observer = nullptr;
}

void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) {
  State.MBB = &MBB;
  State.II = MBB.end();
  assert(&getMF() == MBB.getParent() &&
         "Basic block is in a different function");
}

void MachineIRBuilder::setInstr(MachineInstr &MI) {
  assert(MI.getParent() && "Instruction is not part of a basic block");
  setMBB(*MI.getParent());
  State.II = MI.getIterator();
}

void MachineIRBuilder::setCSEInfo(GISelCSEInfo *Info) { State.CSEInfo = Info; }

void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator II) {
  assert(MBB.getParent() == &getMF() &&
         "Basic block is in a different function");
  State.MBB = &MBB;
  State.II = II;
}

void MachineIRBuilder::recordInsertion(MachineInstr *InsertedInstr) const {
  if (State.Observer)
    State.Observer->createdInstr(*InsertedInstr);
}

void MachineIRBuilder::setChangeObserver(GISelChangeObserver &Observer) {
  State.Observer = &Observer;
}

void MachineIRBuilder::stopObservingChanges() { State.Observer = nullptr; }

//------------------------------------------------------------------------------
// Build instruction variants.
//------------------------------------------------------------------------------

MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) {
  return insertInstr(buildInstrNoInsert(Opcode));
}

MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
  MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  recordInsertion(MIB);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildDirectDbgValue(Register Reg, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}

MachineInstrBuilder
MachineIRBuilder::buildIndirectDbgValue(Register Reg, const MDNode *Variable,
                                        const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}

MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
                                                      const MDNode *Variable,
                                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
                                                         const MDNode *Variable,
                                                         const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert %noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(0U);
  }

  return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
  assert(isa<DILabel>(Label) && "not a label");
  assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
         "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_LABEL);

  return MIB.addMetadata(Label);
}

MachineInstrBuilder MachineIRBuilder::buildDynStackAlloc(const DstOp &Res,
                                                         const SrcOp &Size,
                                                         Align Alignment) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "expected ptr dst type");
  auto MIB = buildInstr(TargetOpcode::G_DYN_STACKALLOC);
  Res.addDefToMIB(*getMRI(), MIB);
  Size.addSrcToMIB(MIB);
  MIB.addImm(Alignment.value());
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildFrameIndex(const DstOp &Res,
                                                      int Idx) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  auto MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addFrameIndex(Idx);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildGlobalValue(const DstOp &Res,
                                                       const GlobalValue *GV) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  assert(Res.getLLTTy(*getMRI()).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");

  auto MIB = buildInstr(TargetOpcode::G_GLOBAL_VALUE);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addGlobalAddress(GV);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildJumpTable(const LLT PtrTy,
                                                     unsigned JTI) {
  return buildInstr(TargetOpcode::G_JUMP_TABLE, {PtrTy}, {})
      .addJumpTableIndex(JTI);
}

void MachineIRBuilder::validateBinaryOp(const LLT Res, const LLT Op0,
                                        const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0 && Res == Op1) && "type mismatch");
}

void MachineIRBuilder::validateShiftOp(const LLT Res, const LLT Op0,
                                       const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0) && "type mismatch");
}

MachineInstrBuilder MachineIRBuilder::buildPtrAdd(const DstOp &Res,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1) {
  assert(Res.getLLTTy(*getMRI()).getScalarType().isPointer() &&
         Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
  assert(Op1.getLLTTy(*getMRI()).getScalarType().isScalar() &&
         "invalid offset type");

  return buildInstr(TargetOpcode::G_PTR_ADD, {Res}, {Op0, Op1});
}

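// A minimal sketch of the expansion below, assuming a p0 pointer and an s64
// offset type: with ValueTy = LLT::scalar(64) and Value = 8 this emits
//   %cst:_(s64) = G_CONSTANT i64 8
//   %res:_(p0) = G_PTR_ADD %op0(p0), %cst(s64)
// whereas a zero offset emits nothing: Res is simply rebound to Op0 and None
// is returned.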
Optional<MachineInstrBuilder>
MachineIRBuilder::materializePtrAdd(Register &Res, Register Op0,
                                    const LLT ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");

  if (Value == 0) {
    Res = Op0;
    return None;
  }

  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  auto Cst = buildConstant(ValueTy, Value);
  return buildPtrAdd(Res, Op0, Cst.getReg(0));
}

MachineInstrBuilder MachineIRBuilder::buildPtrMask(const DstOp &Res,
                                                   const SrcOp &Op0,
                                                   uint32_t NumBits) {
  assert(Res.getLLTTy(*getMRI()).isPointer() &&
         Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");

  auto MIB = buildInstr(TargetOpcode::G_PTR_MASK);
  Res.addDefToMIB(*getMRI(), MIB);
  Op0.addSrcToMIB(MIB);
  MIB.addImm(NumBits);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildBrIndirect(Register Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}

MachineInstrBuilder MachineIRBuilder::buildBrJT(Register TablePtr,
                                                unsigned JTI,
                                                Register IndexReg) {
  assert(getMRI()->getType(TablePtr).isPointer() &&
         "Table reg must be a pointer");
  return buildInstr(TargetOpcode::G_BRJT)
      .addUse(TablePtr)
      .addJumpTableIndex(JTI)
      .addUse(IndexReg);
}

MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::COPY, Res, Op);
}

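// For vector destinations the constant is materialized once as a scalar
// G_CONSTANT of the element type and then splatted, so a request such as
// buildConstant(v4s32, 1) becomes, roughly:
//   %c:_(s32) = G_CONSTANT i32 1
//   %res:_(<4 x s32>) = G_BUILD_VECTOR %c, %c, %c, %c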
MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const ConstantInt &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();
  assert(EltTy.getScalarSizeInBits() == Val.getBitWidth() &&
         "creating constant with the wrong size");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_CONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addCImm(&Val);
    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_CONSTANT);
  Const->setDebugLoc(DebugLoc());
  Res.addDefToMIB(*getMRI(), Const);
  Const.addCImm(&Val);
  return Const;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    int64_t Val) {
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
                               Res.getLLTTy(*getMRI()).getScalarSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const ConstantFP &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();

  assert(APFloat::getSizeInBits(Val.getValueAPF().getSemantics()) ==
             EltTy.getSizeInBits() &&
         "creating fconstant with the wrong size");

  assert(!Ty.isPointer() && "invalid operand type");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addFPImm(&Val);

    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
  Const->setDebugLoc(DebugLoc());
  Res.addDefToMIB(*getMRI(), Const);
  Const.addFPImm(&Val);
  return Const;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const APInt &Val) {
  ConstantInt *CI = ConstantInt::get(getMF().getFunction().getContext(), Val);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     double Val) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP = ConstantFP::get(
      Ctx, getAPFloatFromSize(Val, DstTy.getScalarSizeInBits()));
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const APFloat &Val) {
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP = ConstantFP::get(Ctx, Val);
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildBrCond(Register Tst,
                                                  MachineBasicBlock &Dest) {
  assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildLoad(const DstOp &Res,
                                                const SrcOp &Addr,
                                                MachineMemOperand &MMO) {
  return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
}

MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
                                                     const DstOp &Res,
                                                     const SrcOp &Addr,
                                                     MachineMemOperand &MMO) {
  assert(Res.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(Opcode);
  Res.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildStore(const SrcOp &Val,
                                                 const SrcOp &Addr,
                                                 MachineMemOperand &MMO) {
  assert(Val.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(TargetOpcode::G_STORE);
  Val.addSrcToMIB(MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
                                                  const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
}

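// Pick the extension opcode that matches how the target materializes
// booleans. A target reporting ZeroOrNegativeOneBooleanContent (all-ones
// true values) widens an i1 with G_SEXT, ZeroOrOne targets use G_ZEXT, and
// anything else conservatively falls back to G_ANYEXT.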
unsigned MachineIRBuilder::getBoolExtOp(bool IsVec, bool IsFP) const {
  const auto *TLI = getMF().getSubtarget().getTargetLowering();
  switch (TLI->getBooleanContents(IsVec, IsFP)) {
  case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
    return TargetOpcode::G_SEXT;
  case TargetLoweringBase::ZeroOrOneBooleanContent:
    return TargetOpcode::G_ZEXT;
  default:
    return TargetOpcode::G_ANYEXT;
  }
}

MachineInstrBuilder MachineIRBuilder::buildBoolExt(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   bool IsFP) {
  unsigned ExtOp =
      getBoolExtOp(getMRI()->getType(Op.getReg()).isVector(), IsFP);
  return buildInstr(ExtOp, Res, Op);
}

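// Emit whichever of {ExtOpc, G_TRUNC, COPY} converts Op to Res's type: the
// extension when Res is wider, G_TRUNC when it is narrower, and a plain COPY
// when the two types already match. E.g. buildSExtOrTrunc from an s64 operand
// to an s32 result produces a G_TRUNC; s32 to s32 produces a COPY.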
MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
                                                      const DstOp &Res,
                                                      const SrcOp &Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(Res.getLLTTy(*getMRI()).isScalar() ||
         Res.getLLTTy(*getMRI()).isVector());
  assert(Res.getLLTTy(*getMRI()).isScalar() ==
         Op.getLLTTy(*getMRI()).isScalar());

  unsigned Opcode = TargetOpcode::COPY;
  if (Res.getLLTTy(*getMRI()).getSizeInBits() >
      Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = ExtOpc;
  else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
           Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = TargetOpcode::G_TRUNC;
  else
    assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));

  return buildInstr(Opcode, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
                                                         const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}

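// Select the cast opcode from the operand kinds: pointer-to-scalar becomes
// G_PTRTOINT, scalar-to-pointer G_INTTOPTR, and any other same-sized pair of
// non-pointer types (e.g. s64 <-> v2s32) is a G_BITCAST; identical types
// degenerate to a COPY.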
MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
                                                const SrcOp &Src) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());
  if (SrcTy == DstTy)
    return buildCopy(Dst, Src);

  unsigned Opcode;
  if (SrcTy.isPointer() && DstTy.isScalar())
    Opcode = TargetOpcode::G_PTRTOINT;
  else if (DstTy.isPointer() && SrcTy.isScalar())
    Opcode = TargetOpcode::G_INTTOPTR;
  else {
    assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
    Opcode = TargetOpcode::G_BITCAST;
  }

  return buildInstr(Opcode, Dst, Src);
}

MachineInstrBuilder MachineIRBuilder::buildExtract(const DstOp &Dst,
                                                   const SrcOp &Src,
                                                   uint64_t Index) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());

#ifndef NDEBUG
  assert(SrcTy.isValid() && "invalid operand type");
  assert(DstTy.isValid() && "invalid operand type");
  assert(Index + DstTy.getSizeInBits() <= SrcTy.getSizeInBits() &&
         "extracting off end of register");
#endif

  if (DstTy.getSizeInBits() == SrcTy.getSizeInBits()) {
    assert(Index == 0 && "extraction past the end of a register");
    return buildCast(Dst, Src);
  }

  auto Extract = buildInstr(TargetOpcode::G_EXTRACT);
  Dst.addDefToMIB(*getMRI(), Extract);
  Src.addSrcToMIB(Extract);
  Extract.addImm(Index);
  return Extract;
}

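// Assemble Res from Ops placed at the given bit offsets. When the pieces are
// homogeneous and tightly packed this collapses to a single G_MERGE_VALUES;
// otherwise it falls back to a chain of G_INSERTs into an undef value. For
// instance, two s32 values at offsets 0 and 32 into an s64 merge directly:
//   %res:_(s64) = G_MERGE_VALUES %op0(s32), %op1(s32)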
void MachineIRBuilder::buildSequence(Register Res, ArrayRef<Register> Ops,
                                     ArrayRef<uint64_t> Indices) {
#ifndef NDEBUG
  assert(Ops.size() == Indices.size() && "incompatible args");
  assert(!Ops.empty() && "invalid trivial sequence");
  assert(llvm::is_sorted(Indices) &&
         "sequence offsets must be in ascending order");

  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  for (auto Op : Ops)
    assert(getMRI()->getType(Op).isValid() && "invalid operand type");
#endif

  LLT ResTy = getMRI()->getType(Res);
  LLT OpTy = getMRI()->getType(Ops[0]);
  unsigned OpSize = OpTy.getSizeInBits();
  bool MaybeMerge = true;
  for (unsigned i = 0; i < Ops.size(); ++i) {
    if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
      MaybeMerge = false;
      break;
    }
  }

  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
    buildMerge(Res, Ops);
    return;
  }

  Register ResIn = getMRI()->createGenericVirtualRegister(ResTy);
  buildUndef(ResIn);

  for (unsigned i = 0; i < Ops.size(); ++i) {
    Register ResOut = i + 1 == Ops.size()
                          ? Res
                          : getMRI()->createGenericVirtualRegister(ResTy);
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
    ResIn = ResOut;
  }
}

MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
}

MachineInstrBuilder MachineIRBuilder::buildMerge(const DstOp &Res,
                                                 ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildMerge(const DstOp &Res,
                             std::initializer_list<SrcOp> Ops) {
  assert(Ops.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, Ops);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

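// Convenience overload: split Op into as many pieces of type Res as fit its
// width, creating the result registers on the fly. E.g. unmerging an s64
// operand with Res = s32 yields two fresh s32 registers:
//   %lo:_(s32), %hi:_(s32) = G_UNMERGE_VALUES %op(s64)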
MachineInstrBuilder MachineIRBuilder::buildUnmerge(LLT Res,
                                                   const SrcOp &Op) {
  unsigned NumReg =
      Op.getLLTTy(*getMRI()).getSizeInBits() / Res.getSizeInBits();
  SmallVector<Register, 8> TmpVec;
  for (unsigned I = 0; I != NumReg; ++I)
    TmpVec.push_back(getMRI()->createGenericVirtualRegister(Res));
  return buildUnmerge(TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<Register> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
                                                       ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildSplatVector(const DstOp &Res,
                                                       const SrcOp &Src) {
  SmallVector<SrcOp, 8> TmpVec(Res.getLLTTy(*getMRI()).getNumElements(), Src);
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
                                        ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
}

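// Insert Op into Src at the given bit Index, producing Res. If Op already
// covers the whole destination the G_INSERT degenerates into a cast of Op;
// otherwise, e.g. inserting an s32 at bit 32 of an s64:
//   %res:_(s64) = G_INSERT %src(s64), %op(s32), 32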
MachineInstrBuilder MachineIRBuilder::buildInsert(const DstOp &Res,
                                                  const SrcOp &Src,
                                                  const SrcOp &Op,
                                                  unsigned Index) {
  assert(Index + Op.getLLTTy(*getMRI()).getSizeInBits() <=
             Res.getLLTTy(*getMRI()).getSizeInBits() &&
         "insertion past the end of a register");

  if (Res.getLLTTy(*getMRI()).getSizeInBits() ==
      Op.getLLTTy(*getMRI()).getSizeInBits()) {
    return buildCast(Res, Op);
  }

  return buildInstr(TargetOpcode::G_INSERT, Res, {Src, Op, uint64_t(Index)});
}

MachineInstrBuilder
MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                 ArrayRef<Register> ResultRegs,
                                 bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (unsigned ResultReg : ResultRegs)
    MIB.addDef(ResultReg);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     ArrayRef<DstOp> Results,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (DstOp Result : Results)
    Result.addDefToMIB(*getMRI(), MIB);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
                                                 const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildFPTrunc(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
}

MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1,
                                                Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1}, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildSelect(const DstOp &Res,
                                                  const SrcOp &Tst,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1,
                                                  Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1}, Flags);
}

MachineInstrBuilder
MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
                                           const SrcOp &Elt, const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
}

MachineInstrBuilder
MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
                                            const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
}

MachineInstrBuilder MachineIRBuilder::buildAtomicCmpXchgWithSuccess(
    Register OldValRes, Register SuccessRes, Register Addr, Register CmpVal,
    Register NewVal, MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT SuccessResTy = getMRI()->getType(SuccessRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(SuccessResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
      .addDef(OldValRes)
      .addDef(SuccessRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicCmpXchg(Register OldValRes, Register Addr,
                                     Register CmpVal, Register NewVal,
                                     MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(
    unsigned Opcode, const DstOp &OldValRes, const SrcOp &Addr,
    const SrcOp &Val, MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = OldValRes.getLLTTy(*getMRI());
  LLT AddrTy = Addr.getLLTTy(*getMRI());
  LLT ValTy = Val.getLLTTy(*getMRI());
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(ValTy.isValid() && "invalid operand type");
  assert(OldValResTy == ValTy && "type mismatch");
  assert(MMO.isAtomic() && "not atomic mem operand");
#endif

  auto MIB = buildInstr(Opcode);
  OldValRes.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  Val.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXchg(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAdd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWSub(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAnd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWNand(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(Register OldValRes,
                                                       Register Addr,
                                                       Register Val,
                                                       MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXor(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMax(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMin(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmax(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmin(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFAdd(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FADD, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFSub(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FSUB, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildFence(unsigned Ordering, unsigned Scope) {
  return buildInstr(TargetOpcode::G_FENCE)
    .addImm(Ordering)
    .addImm(Scope);
}

MachineInstrBuilder
MachineIRBuilder::buildBlockAddress(Register Res, const BlockAddress *BA) {
#ifndef NDEBUG
  assert(getMRI()->getType(Res).isPointer() && "invalid res type");
#endif

  return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
}

void MachineIRBuilder::validateTruncExt(const LLT DstTy, const LLT SrcTy,
                                        bool IsExtend) {
#ifndef NDEBUG
  if (DstTy.isVector()) {
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");

  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}

void MachineIRBuilder::validateSelectOp(const LLT ResTy, const LLT TstTy,
                                        const LLT Op0Ty, const LLT Op1Ty) {
#ifndef NDEBUG
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
  if (ResTy.isScalar() || ResTy.isPointer())
    assert(TstTy.isScalar() && "type mismatch");
  else
    assert((TstTy.isScalar() ||
            (TstTy.isVector() &&
             TstTy.getNumElements() == Op0Ty.getNumElements())) &&
           "type mismatch");
#endif
}

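// Core entry point shared by all typed build* helpers: validate the dst/src
// operand lists for the opcodes that have structural invariants, then emit
// the instruction. A few opcodes are also canonicalized on the fly, e.g. a
// G_MERGE_VALUES producing a vector is rewritten to G_BUILD_VECTOR or
// G_CONCAT_VECTORS (see the G_MERGE_VALUES case below).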
MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opc,
                                                 ArrayRef<DstOp> DstOps,
                                                 ArrayRef<SrcOp> SrcOps,
                                                 Optional<unsigned> Flags) {
  switch (Opc) {
  default:
    break;
  case TargetOpcode::G_SELECT: {
    assert(DstOps.size() == 1 && "Invalid select");
    assert(SrcOps.size() == 3 && "Invalid select");
    validateSelectOp(
        DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
        SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_ADD:
  case TargetOpcode::G_AND:
  case TargetOpcode::G_MUL:
  case TargetOpcode::G_OR:
  case TargetOpcode::G_SUB:
  case TargetOpcode::G_XOR:
  case TargetOpcode::G_UDIV:
  case TargetOpcode::G_SDIV:
  case TargetOpcode::G_UREM:
  case TargetOpcode::G_SREM:
  case TargetOpcode::G_SMIN:
  case TargetOpcode::G_SMAX:
  case TargetOpcode::G_UMIN:
  case TargetOpcode::G_UMAX:
  case TargetOpcode::G_UADDSAT:
  case TargetOpcode::G_SADDSAT:
  case TargetOpcode::G_USUBSAT:
  case TargetOpcode::G_SSUBSAT: {
    // All these are binary ops.
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()),
                     SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SHL:
  case TargetOpcode::G_ASHR:
  case TargetOpcode::G_LSHR: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateShiftOp(DstOps[0].getLLTTy(*getMRI()),
                    SrcOps[0].getLLTTy(*getMRI()),
                    SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SEXT:
  case TargetOpcode::G_ZEXT:
  case TargetOpcode::G_ANYEXT:
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), true);
    break;
  case TargetOpcode::G_TRUNC:
  case TargetOpcode::G_FPTRUNC: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), false);
    break;
  }
  case TargetOpcode::G_BITCAST: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    assert(DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
           SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() && "invalid bitcast");
    break;
  }
  case TargetOpcode::COPY:
    assert(DstOps.size() == 1 && "Invalid Dst");
    // If the caller wants to add a subreg source it has to be done separately
    // so we may not have any SrcOps at this point yet.
    break;
  case TargetOpcode::G_FCMP:
  case TargetOpcode::G_ICMP: {
    assert(DstOps.size() == 1 && "Invalid Dst Operands");
    assert(SrcOps.size() == 3 && "Invalid Src Operands");
    // For F/ICMP, the first src operand is the predicate, followed by
    // the two comparands.
    assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
           "Expecting predicate");
    assert([&]() -> bool {
      CmpInst::Predicate Pred = SrcOps[0].getPredicate();
      return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
                                         : CmpInst::isFPPredicate(Pred);
    }() && "Invalid predicate");
    assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert([&]() -> bool {
      LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
      LLT DstTy = DstOps[0].getLLTTy(*getMRI());
      if (Op0Ty.isScalar() || Op0Ty.isPointer())
        return DstTy.isScalar();
      else
        return DstTy.isVector() &&
               DstTy.getNumElements() == Op0Ty.getNumElements();
    }() && "Type Mismatch");
    break;
  }
  case TargetOpcode::G_UNMERGE_VALUES: {
    assert(!DstOps.empty() && "Invalid trivial sequence");
    assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
    assert(std::all_of(DstOps.begin(), DstOps.end(),
                       [&, this](const DstOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                DstOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in output list");
    assert(DstOps.size() * DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "output operands do not cover input register");
    break;
  }
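  // G_MERGE_VALUES is canonicalized here rather than emitted blindly: a
  // single source becomes a cast, and a vector destination is rewritten to
  // G_CONCAT_VECTORS (vector sources) or G_BUILD_VECTOR (scalar sources).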
  case TargetOpcode::G_MERGE_VALUES: {
    assert(!SrcOps.empty() && "invalid trivial sequence");
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    if (SrcOps.size() == 1)
      return buildCast(DstOps[0], SrcOps[0]);
    if (DstOps[0].getLLTTy(*getMRI()).isVector()) {
      if (SrcOps[0].getLLTTy(*getMRI()).isVector())
        return buildInstr(TargetOpcode::G_CONCAT_VECTORS, DstOps, SrcOps);
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    }
    break;
  }
  case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid Dst size");
    assert(SrcOps.size() == 2 && "Invalid Src size");
    assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
            DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
           "Invalid operand type");
    assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
    assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
               DstOps[0].getLLTTy(*getMRI()) &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_INSERT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid dst size");
    assert(SrcOps.size() == 3 && "Invalid src size");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
               SrcOps[1].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
    assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
               SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input scalars do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    if (SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
        DstOps[0].getLLTTy(*getMRI()).getElementType().getSizeInBits())
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    break;
  }
  case TargetOpcode::G_CONCAT_VECTORS: {
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return (Op.getLLTTy(*getMRI()).isVector() &&
                                 Op.getLLTTy(*getMRI()) ==
                                     SrcOps[0].getLLTTy(*getMRI()));
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input vectors do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_UADDE: {
    assert(DstOps.size() == 2 && "Invalid no of dst operands");
    assert(SrcOps.size() == 3 && "Invalid no of src operands");
    assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
           (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
           "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "type mismatch");
    break;
  }
  }

  auto MIB = buildInstr(Opc);
  for (const DstOp &Op : DstOps)
    Op.addDefToMIB(*getMRI(), MIB);
  for (const SrcOp &Op : SrcOps)
    Op.addSrcToMIB(MIB);
  if (Flags)
    MIB->setFlags(*Flags);
  return MIB;
}