1 //===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 /// \file
/// This file implements the MachineIRBuilder class.
10 //===----------------------------------------------------------------------===//
11 #include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
12 #include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"
13 
14 #include "llvm/CodeGen/MachineFunction.h"
15 #include "llvm/CodeGen/MachineInstr.h"
16 #include "llvm/CodeGen/MachineInstrBuilder.h"
17 #include "llvm/CodeGen/MachineRegisterInfo.h"
18 #include "llvm/CodeGen/TargetInstrInfo.h"
19 #include "llvm/CodeGen/TargetLowering.h"
20 #include "llvm/CodeGen/TargetOpcodes.h"
21 #include "llvm/CodeGen/TargetSubtargetInfo.h"
22 #include "llvm/IR/DebugInfo.h"
23 
24 using namespace llvm;
25 
// Rebind the builder to a new function. All per-function state (insertion
// point, debug location, observer) is reset and must be re-established by the
// caller before building instructions.
void MachineIRBuilder::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MBB = nullptr;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  State.DL = DebugLoc();
  State.II = MachineBasicBlock::iterator();
  State.Observer = nullptr;
}
35 
36 void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) {
37   State.MBB = &MBB;
38   State.II = MBB.end();
39   assert(&getMF() == MBB.getParent() &&
40          "Basic block is in a different function");
41 }
42 
// Set the insertion point to just before MI (new instructions are inserted
// before the iterator position).
void MachineIRBuilder::setInstr(MachineInstr &MI) {
  assert(MI.getParent() && "Instruction is not part of a basic block");
  setMBB(*MI.getParent());
  State.II = MI.getIterator();
}
48 
49 void MachineIRBuilder::setCSEInfo(GISelCSEInfo *Info) { State.CSEInfo = Info; }
50 
// Set the insertion point to an arbitrary (block, iterator) pair within the
// current function.
void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator II) {
  assert(MBB.getParent() == &getMF() &&
         "Basic block is in a different function");
  State.MBB = &MBB;
  State.II = II;
}
58 
59 void MachineIRBuilder::recordInsertion(MachineInstr *InsertedInstr) const {
60   if (State.Observer)
61     State.Observer->createdInstr(*InsertedInstr);
62 }
63 
// Attach an observer that will be told about every instruction this builder
// inserts (see recordInsertion).
void MachineIRBuilder::setChangeObserver(GISelChangeObserver &Observer) {
  State.Observer = &Observer;
}
67 
68 void MachineIRBuilder::stopObservingChanges() { State.Observer = nullptr; }
69 
70 //------------------------------------------------------------------------------
71 // Build instruction variants.
72 //------------------------------------------------------------------------------
73 
// Build an instruction with the given opcode and insert it at the current
// insertion point (operands are added by the caller via the returned MIB).
MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) {
  return insertInstr(buildInstrNoInsert(Opcode));
}
77 
78 MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
79   MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
80   return MIB;
81 }
82 
// Insert a previously-built instruction at the current insertion point.
// The observer is notified only after the instruction is in the block.
MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  recordInsertion(MIB);
  return MIB;
}
88 
// Build a DBG_VALUE describing Variable as living directly in Reg.
MachineInstrBuilder
MachineIRBuilder::buildDirectDbgValue(Register Reg, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}
101 
// Build a DBG_VALUE describing Variable as living at the address held in Reg
// (indirect location).
MachineInstrBuilder
MachineIRBuilder::buildIndirectDbgValue(Register Reg, const MDNode *Variable,
                                        const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}
114 
// Build a DBG_VALUE describing Variable as living in stack frame slot FI.
MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
                                                      const MDNode *Variable,
                                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}
129 
// Build a DBG_VALUE describing Variable as the constant C. Integers wider
// than 64 bits are attached as CImm operands, narrower ones as plain
// immediates; unsupported constant kinds degrade to $noreg.
MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
                                                         const MDNode *Variable,
                                                         const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    // Immediate operands only hold 64 bits; keep wider values as CImm.
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert %noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(0U);
  }

  return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
}
153 
154 MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
155   assert(isa<DILabel>(Label) && "not a label");
156   assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
157          "Expected inlined-at fields to agree");
158   auto MIB = buildInstr(TargetOpcode::DBG_LABEL);
159 
160   return MIB.addMetadata(Label);
161 }
162 
// Build G_DYN_STACKALLOC: Res = dynamically allocate Size bytes on the stack
// with the given alignment.
MachineInstrBuilder MachineIRBuilder::buildDynStackAlloc(const DstOp &Res,
                                                         const SrcOp &Size,
                                                         Align Alignment) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "expected ptr dst type");
  auto MIB = buildInstr(TargetOpcode::G_DYN_STACKALLOC);
  Res.addDefToMIB(*getMRI(), MIB);
  Size.addSrcToMIB(MIB);
  MIB.addImm(Alignment.value());
  return MIB;
}
173 
// Build G_FRAME_INDEX: Res = address of stack slot Idx.
MachineInstrBuilder MachineIRBuilder::buildFrameIndex(const DstOp &Res,
                                                      int Idx) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  auto MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addFrameIndex(Idx);
  return MIB;
}
182 
// Build G_GLOBAL_VALUE: Res = address of GV. The result's address space must
// match the global's.
MachineInstrBuilder MachineIRBuilder::buildGlobalValue(const DstOp &Res,
                                                       const GlobalValue *GV) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  assert(Res.getLLTTy(*getMRI()).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");

  auto MIB = buildInstr(TargetOpcode::G_GLOBAL_VALUE);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addGlobalAddress(GV);
  return MIB;
}
195 
// Build G_JUMP_TABLE: a fresh PtrTy value holding the address of jump table
// JTI.
MachineInstrBuilder MachineIRBuilder::buildJumpTable(const LLT PtrTy,
                                                     unsigned JTI) {
  return buildInstr(TargetOpcode::G_JUMP_TABLE, {PtrTy}, {})
      .addJumpTableIndex(JTI);
}
201 
// Sanity-check a binary op: all three types must be identical scalars or
// vectors.
void MachineIRBuilder::validateBinaryOp(const LLT Res, const LLT Op0,
                                        const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0 && Res == Op1) && "type mismatch");
}
207 
// Sanity-check a shift: result and value types match, but the shift-amount
// type (Op1) is deliberately unconstrained and may differ.
void MachineIRBuilder::validateShiftOp(const LLT Res, const LLT Op0,
                                       const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0) && "type mismatch");
}
213 
214 MachineInstrBuilder MachineIRBuilder::buildPtrAdd(const DstOp &Res,
215                                                   const SrcOp &Op0,
216                                                   const SrcOp &Op1) {
217   assert(Res.getLLTTy(*getMRI()).getScalarType().isPointer() &&
218          Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
219   assert(Op1.getLLTTy(*getMRI()).getScalarType().isScalar() && "invalid offset type");
220 
221   return buildInstr(TargetOpcode::G_PTR_ADD, {Res}, {Op0, Op1});
222 }
223 
// Materialize "Res = Op0 + Value" as a G_PTR_ADD of a fresh G_CONSTANT.
// Res is an output parameter and must come in as the null register. When
// Value is zero no instruction is needed: Res simply aliases Op0 and None is
// returned.
Optional<MachineInstrBuilder>
MachineIRBuilder::materializePtrAdd(Register &Res, Register Op0,
                                    const LLT ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar()  && "invalid offset type");

  if (Value == 0) {
    Res = Op0;
    return None;
  }

  // The result has the same (pointer) type as the base operand.
  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  auto Cst = buildConstant(ValueTy, Value);
  return buildPtrAdd(Res, Op0, Cst.getReg(0));
}
239 
// Build G_PTR_MASK: Res = Op0 with the low NumBits bits cleared (pointer
// alignment mask).
MachineInstrBuilder MachineIRBuilder::buildPtrMask(const DstOp &Res,
                                                   const SrcOp &Op0,
                                                   uint32_t NumBits) {
  assert(Res.getLLTTy(*getMRI()).isPointer() &&
         Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");

  auto MIB = buildInstr(TargetOpcode::G_PTR_MASK);
  Res.addDefToMIB(*getMRI(), MIB);
  Op0.addSrcToMIB(MIB);
  MIB.addImm(NumBits);
  return MIB;
}
252 
253 MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
254   return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
255 }
256 
// Build G_BRINDIRECT: branch to the address held in Tgt.
MachineInstrBuilder MachineIRBuilder::buildBrIndirect(Register Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}
261 
// Build G_BRJT: branch through jump table JTI, based at TablePtr and indexed
// by IndexReg.
MachineInstrBuilder MachineIRBuilder::buildBrJT(Register TablePtr,
                                                unsigned JTI,
                                                Register IndexReg) {
  assert(getMRI()->getType(TablePtr).isPointer() &&
         "Table reg must be a pointer");
  return buildInstr(TargetOpcode::G_BRJT)
      .addUse(TablePtr)
      .addJumpTableIndex(JTI)
      .addUse(IndexReg);
}
272 
// Build a plain COPY of Op into Res.
MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::COPY, Res, Op);
}
277 
// Build G_CONSTANT of Res's type. For vector destinations a scalar
// G_CONSTANT of the element type is created and splatted with
// G_BUILD_VECTOR.
MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const ConstantInt &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();
  assert(EltTy.getScalarSizeInBits() == Val.getBitWidth() &&
         "creating constant with the wrong size");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_CONSTANT)
    .addDef(getMRI()->createGenericVirtualRegister(EltTy))
    .addCImm(&Val);
    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_CONSTANT);
  // Constants don't belong to any particular source location; clear the
  // builder's current DebugLoc on the new instruction.
  Const->setDebugLoc(DebugLoc());
  Res.addDefToMIB(*getMRI(), Const);
  Const.addCImm(&Val);
  return Const;
}
298 
299 MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
300                                                     int64_t Val) {
301   auto IntN = IntegerType::get(getMF().getFunction().getContext(),
302                                Res.getLLTTy(*getMRI()).getScalarSizeInBits());
303   ConstantInt *CI = ConstantInt::get(IntN, Val, true);
304   return buildConstant(Res, *CI);
305 }
306 
// Build G_FCONSTANT of Res's type. For vector destinations a scalar
// G_FCONSTANT of the element type is created and splatted with
// G_BUILD_VECTOR.
MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const ConstantFP &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();

  assert(APFloat::getSizeInBits(Val.getValueAPF().getSemantics())
         == EltTy.getSizeInBits() &&
         "creating fconstant with the wrong size");

  assert(!Ty.isPointer() && "invalid operand type");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
    .addDef(getMRI()->createGenericVirtualRegister(EltTy))
    .addFPImm(&Val);

    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
  Res.addDefToMIB(*getMRI(), Const);
  Const.addFPImm(&Val);
  return Const;
}
331 
332 MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
333                                                     const APInt &Val) {
334   ConstantInt *CI = ConstantInt::get(getMF().getFunction().getContext(), Val);
335   return buildConstant(Res, *CI);
336 }
337 
// Build a G_FCONSTANT from a host double, converting it to an APFloat of the
// destination's scalar width first.
MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     double Val) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP =
      ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getScalarSizeInBits()));
  return buildFConstant(Res, *CFP);
}
346 
347 MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
348                                                      const APFloat &Val) {
349   auto &Ctx = getMF().getFunction().getContext();
350   auto *CFP = ConstantFP::get(Ctx, Val);
351   return buildFConstant(Res, *CFP);
352 }
353 
// Build G_BRCOND: branch to Dest if Tst (a scalar condition) is true.
MachineInstrBuilder MachineIRBuilder::buildBrCond(Register Tst,
                                                  MachineBasicBlock &Dest) {
  assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
}
360 
// Build G_LOAD: Res = load from Addr with memory info MMO.
MachineInstrBuilder MachineIRBuilder::buildLoad(const DstOp &Res,
                                                const SrcOp &Addr,
                                                MachineMemOperand &MMO) {
  return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
}
366 
// Build any load-like instruction (G_LOAD, G_SEXTLOAD, G_ZEXTLOAD, ...):
// Res = Opcode(Addr) with memory info MMO.
MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
                                                     const DstOp &Res,
                                                     const SrcOp &Addr,
                                                     MachineMemOperand &MMO) {
  assert(Res.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(Opcode);
  Res.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}
380 
// Build G_STORE: store Val to Addr with memory info MMO.
MachineInstrBuilder MachineIRBuilder::buildStore(const SrcOp &Val,
                                                 const SrcOp &Addr,
                                                 MachineMemOperand &MMO) {
  assert(Val.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(TargetOpcode::G_STORE);
  Val.addSrcToMIB(MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}
393 
// Build G_ANYEXT: widen Op to Res's type with undefined high bits.
MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
                                                  const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
}
398 
// Build G_SEXT: sign-extend Op to Res's type.
MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_SEXT, Res, Op);
}
403 
// Build G_ZEXT: zero-extend Op to Res's type.
MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
}
408 
// Pick the extension opcode that matches the target's boolean representation
// (all-ones booleans sign-extend, 0/1 booleans zero-extend, anything else is
// unconstrained).
unsigned MachineIRBuilder::getBoolExtOp(bool IsVec, bool IsFP) const {
  const auto *TLI = getMF().getSubtarget().getTargetLowering();
  switch (TLI->getBooleanContents(IsVec, IsFP)) {
  case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
    return TargetOpcode::G_SEXT;
  case TargetLoweringBase::ZeroOrOneBooleanContent:
    return TargetOpcode::G_ZEXT;
  default:
    return TargetOpcode::G_ANYEXT;
  }
}
420 
// Extend a boolean value Op to Res using whichever extension the target's
// boolean-contents setting requires.
MachineInstrBuilder MachineIRBuilder::buildBoolExt(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   bool IsFP) {
  unsigned ExtOp = getBoolExtOp(getMRI()->getType(Op.getReg()).isVector(), IsFP);
  return buildInstr(ExtOp, Res, Op);
}
427 
428 MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
429                                                       const DstOp &Res,
430                                                       const SrcOp &Op) {
431   assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
432           TargetOpcode::G_SEXT == ExtOpc) &&
433          "Expecting Extending Opc");
434   assert(Res.getLLTTy(*getMRI()).isScalar() ||
435          Res.getLLTTy(*getMRI()).isVector());
436   assert(Res.getLLTTy(*getMRI()).isScalar() ==
437          Op.getLLTTy(*getMRI()).isScalar());
438 
439   unsigned Opcode = TargetOpcode::COPY;
440   if (Res.getLLTTy(*getMRI()).getSizeInBits() >
441       Op.getLLTTy(*getMRI()).getSizeInBits())
442     Opcode = ExtOpc;
443   else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
444            Op.getLLTTy(*getMRI()).getSizeInBits())
445     Opcode = TargetOpcode::G_TRUNC;
446   else
447     assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));
448 
449   return buildInstr(Opcode, Res, Op);
450 }
451 
// Sign-extend or truncate Op to Res's type as needed.
MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}
456 
// Zero-extend or truncate Op to Res's type as needed.
MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}
461 
// Any-extend or truncate Op to Res's type as needed.
MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
                                                         const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}
466 
467 MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
468                                                 const SrcOp &Src) {
469   LLT SrcTy = Src.getLLTTy(*getMRI());
470   LLT DstTy = Dst.getLLTTy(*getMRI());
471   if (SrcTy == DstTy)
472     return buildCopy(Dst, Src);
473 
474   unsigned Opcode;
475   if (SrcTy.isPointer() && DstTy.isScalar())
476     Opcode = TargetOpcode::G_PTRTOINT;
477   else if (DstTy.isPointer() && SrcTy.isScalar())
478     Opcode = TargetOpcode::G_INTTOPTR;
479   else {
480     assert(!SrcTy.isPointer() && !DstTy.isPointer() && "n G_ADDRCAST yet");
481     Opcode = TargetOpcode::G_BITCAST;
482   }
483 
484   return buildInstr(Opcode, Dst, Src);
485 }
486 
487 MachineInstrBuilder MachineIRBuilder::buildExtract(const DstOp &Dst,
488                                                    const SrcOp &Src,
489                                                    uint64_t Index) {
490   LLT SrcTy = Src.getLLTTy(*getMRI());
491   LLT DstTy = Dst.getLLTTy(*getMRI());
492 
493 #ifndef NDEBUG
494   assert(SrcTy.isValid() && "invalid operand type");
495   assert(DstTy.isValid() && "invalid operand type");
496   assert(Index + DstTy.getSizeInBits() <= SrcTy.getSizeInBits() &&
497          "extracting off end of register");
498 #endif
499 
500   if (DstTy.getSizeInBits() == SrcTy.getSizeInBits()) {
501     assert(Index == 0 && "insertion past the end of a register");
502     return buildCast(Dst, Src);
503   }
504 
505   auto Extract = buildInstr(TargetOpcode::G_EXTRACT);
506   Dst.addDefToMIB(*getMRI(), Extract);
507   Src.addSrcToMIB(Extract);
508   Extract.addImm(Index);
509   return Extract;
510 }
511 
// Assemble Res from a list of (register, bit-offset) pairs. If the operands
// tile Res exactly, in order, with a single uniform type, one G_MERGE_VALUES
// suffices; otherwise a chain of G_INSERTs into an initial G_IMPLICIT_DEF is
// emitted, defining Res on the final insert.
void MachineIRBuilder::buildSequence(Register Res, ArrayRef<Register> Ops,
                                     ArrayRef<uint64_t> Indices) {
#ifndef NDEBUG
  assert(Ops.size() == Indices.size() && "incompatible args");
  assert(!Ops.empty() && "invalid trivial sequence");
  assert(llvm::is_sorted(Indices) &&
         "sequence offsets must be in ascending order");

  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  for (auto Op : Ops)
    assert(getMRI()->getType(Op).isValid() && "invalid operand type");
#endif

  LLT ResTy = getMRI()->getType(Res);
  LLT OpTy = getMRI()->getType(Ops[0]);
  unsigned OpSize = OpTy.getSizeInBits();
  // Merging requires every operand to share OpTy and sit at its natural,
  // densely-packed offset.
  bool MaybeMerge = true;
  for (unsigned i = 0; i < Ops.size(); ++i) {
    if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
      MaybeMerge = false;
      break;
    }
  }

  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
    buildMerge(Res, Ops);
    return;
  }

  // Fallback: start from undef and insert each piece in turn.
  Register ResIn = getMRI()->createGenericVirtualRegister(ResTy);
  buildUndef(ResIn);

  for (unsigned i = 0; i < Ops.size(); ++i) {
    Register ResOut = i + 1 == Ops.size()
                          ? Res
                          : getMRI()->createGenericVirtualRegister(ResTy);
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
    ResIn = ResOut;
  }
}
552 
// Build G_IMPLICIT_DEF: Res = undef.
MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
}
556 
MachineInstrBuilder MachineIRBuilder::buildMerge(const DstOp &Res,
                                                 ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
}
566 
// Build G_MERGE_VALUES from an explicit initializer list of source operands.
MachineInstrBuilder
MachineIRBuilder::buildMerge(const DstOp &Res,
                             std::initializer_list<SrcOp> Ops) {
  assert(Ops.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, Ops);
}
573 
// Build G_UNMERGE_VALUES producing one fresh register per requested type.
MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}
583 
// Build G_UNMERGE_VALUES splitting Op into as many Res-typed pieces as fit
// its width, creating a fresh register for each piece.
MachineInstrBuilder MachineIRBuilder::buildUnmerge(LLT Res,
                                                   const SrcOp &Op) {
  unsigned NumReg = Op.getLLTTy(*getMRI()).getSizeInBits() / Res.getSizeInBits();
  SmallVector<Register, 8> TmpVec;
  for (unsigned I = 0; I != NumReg; ++I)
    TmpVec.push_back(getMRI()->createGenericVirtualRegister(Res));
  return buildUnmerge(TmpVec, Op);
}
592 
// Build G_UNMERGE_VALUES into a caller-provided list of result registers.
MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<Register> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}
602 
MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
                                                       ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}
611 
// Build G_BUILD_VECTOR with Src repeated once per element of Res's vector
// type (a splat).
MachineInstrBuilder MachineIRBuilder::buildSplatVector(const DstOp &Res,
                                                       const SrcOp &Src) {
  SmallVector<SrcOp, 8> TmpVec(Res.getLLTTy(*getMRI()).getNumElements(), Src);
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}
617 
MachineInstrBuilder
MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
                                        ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
}
627 
MachineInstrBuilder
MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
}
636 
// Build G_INSERT: Res = Src with Op spliced in at bit offset Index. When Op
// covers all of Res the insert degenerates to a cast of Op.
MachineInstrBuilder MachineIRBuilder::buildInsert(const DstOp &Res,
                                                  const SrcOp &Src,
                                                  const SrcOp &Op,
                                                  unsigned Index) {
  assert(Index + Op.getLLTTy(*getMRI()).getSizeInBits() <=
             Res.getLLTTy(*getMRI()).getSizeInBits() &&
         "insertion past the end of a register");

  if (Res.getLLTTy(*getMRI()).getSizeInBits() ==
      Op.getLLTTy(*getMRI()).getSizeInBits()) {
    return buildCast(Res, Op);
  }

  return buildInstr(TargetOpcode::G_INSERT, Res, {Src, Op, uint64_t(Index)});
}
652 
// Build a G_INTRINSIC (or G_INTRINSIC_W_SIDE_EFFECTS) call to ID. Defs come
// first, then the intrinsic ID; argument operands are appended by the caller.
MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     ArrayRef<Register> ResultRegs,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (unsigned ResultReg : ResultRegs)
    MIB.addDef(ResultReg);
  MIB.addIntrinsicID(ID);
  return MIB;
}
664 
// DstOp-based overload of buildIntrinsic; otherwise identical to the
// Register-based version above.
MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     ArrayRef<DstOp> Results,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (DstOp Result : Results)
    Result.addDefToMIB(*getMRI(), MIB);
  MIB.addIntrinsicID(ID);
  return MIB;
}
676 
// Build G_TRUNC: narrow Op to Res's type.
MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
                                                 const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
}
681 
// Build G_FPTRUNC: narrow floating-point Op to Res's type, with optional
// fast-math flags.
MachineInstrBuilder MachineIRBuilder::buildFPTrunc(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op, Flags);
}
687 
// Build G_ICMP: Res = integer-compare(Pred, Op0, Op1).
MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
}
694 
// Build G_FCMP: Res = float-compare(Pred, Op0, Op1), with optional fast-math
// flags.
MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1,
                                                Optional<unsigned> Flags) {

  return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1}, Flags);
}
703 
// Build G_SELECT: Res = Tst ? Op0 : Op1, with optional fast-math flags.
MachineInstrBuilder MachineIRBuilder::buildSelect(const DstOp &Res,
                                                  const SrcOp &Tst,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1,
                                                  Optional<unsigned> Flags) {

  return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1}, Flags);
}
712 
// Build G_INSERT_VECTOR_ELT: Res = Val with element at Idx replaced by Elt.
MachineInstrBuilder
MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
                                           const SrcOp &Elt, const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
}
718 
// Build G_EXTRACT_VECTOR_ELT: Res = element Idx of vector Val.
MachineInstrBuilder
MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
                                            const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
}
724 
// Build G_ATOMIC_CMPXCHG_WITH_SUCCESS: atomically compare *Addr with CmpVal
// and store NewVal on match; OldValRes receives the prior value and
// SuccessRes a scalar flag indicating whether the exchange happened.
MachineInstrBuilder MachineIRBuilder::buildAtomicCmpXchgWithSuccess(
    Register OldValRes, Register SuccessRes, Register Addr, Register CmpVal,
    Register NewVal, MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT SuccessResTy = getMRI()->getType(SuccessRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(SuccessResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
      .addDef(OldValRes)
      .addDef(SuccessRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}
751 
752 MachineInstrBuilder
753 MachineIRBuilder::buildAtomicCmpXchg(Register OldValRes, Register Addr,
754                                      Register CmpVal, Register NewVal,
755                                      MachineMemOperand &MMO) {
756 #ifndef NDEBUG
757   LLT OldValResTy = getMRI()->getType(OldValRes);
758   LLT AddrTy = getMRI()->getType(Addr);
759   LLT CmpValTy = getMRI()->getType(CmpVal);
760   LLT NewValTy = getMRI()->getType(NewVal);
761   assert(OldValResTy.isScalar() && "invalid operand type");
762   assert(AddrTy.isPointer() && "invalid operand type");
763   assert(CmpValTy.isValid() && "invalid operand type");
764   assert(NewValTy.isValid() && "invalid operand type");
765   assert(OldValResTy == CmpValTy && "type mismatch");
766   assert(OldValResTy == NewValTy && "type mismatch");
767 #endif
768 
769   return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
770       .addDef(OldValRes)
771       .addUse(Addr)
772       .addUse(CmpVal)
773       .addUse(NewVal)
774       .addMemOperand(&MMO);
775 }
776 
777 MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(
778   unsigned Opcode, const DstOp &OldValRes,
779   const SrcOp &Addr, const SrcOp &Val,
780   MachineMemOperand &MMO) {
781 
782 #ifndef NDEBUG
783   LLT OldValResTy = OldValRes.getLLTTy(*getMRI());
784   LLT AddrTy = Addr.getLLTTy(*getMRI());
785   LLT ValTy = Val.getLLTTy(*getMRI());
786   assert(OldValResTy.isScalar() && "invalid operand type");
787   assert(AddrTy.isPointer() && "invalid operand type");
788   assert(ValTy.isValid() && "invalid operand type");
789   assert(OldValResTy == ValTy && "type mismatch");
790   assert(MMO.isAtomic() && "not atomic mem operand");
791 #endif
792 
793   auto MIB = buildInstr(Opcode);
794   OldValRes.addDefToMIB(*getMRI(), MIB);
795   Addr.addSrcToMIB(MIB);
796   Val.addSrcToMIB(MIB);
797   MIB.addMemOperand(&MMO);
798   return MIB;
799 }
800 
801 MachineInstrBuilder
802 MachineIRBuilder::buildAtomicRMWXchg(Register OldValRes, Register Addr,
803                                      Register Val, MachineMemOperand &MMO) {
804   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
805                         MMO);
806 }
807 MachineInstrBuilder
808 MachineIRBuilder::buildAtomicRMWAdd(Register OldValRes, Register Addr,
809                                     Register Val, MachineMemOperand &MMO) {
810   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
811                         MMO);
812 }
813 MachineInstrBuilder
814 MachineIRBuilder::buildAtomicRMWSub(Register OldValRes, Register Addr,
815                                     Register Val, MachineMemOperand &MMO) {
816   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
817                         MMO);
818 }
819 MachineInstrBuilder
820 MachineIRBuilder::buildAtomicRMWAnd(Register OldValRes, Register Addr,
821                                     Register Val, MachineMemOperand &MMO) {
822   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
823                         MMO);
824 }
825 MachineInstrBuilder
826 MachineIRBuilder::buildAtomicRMWNand(Register OldValRes, Register Addr,
827                                      Register Val, MachineMemOperand &MMO) {
828   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
829                         MMO);
830 }
831 MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(Register OldValRes,
832                                                        Register Addr,
833                                                        Register Val,
834                                                        MachineMemOperand &MMO) {
835   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
836                         MMO);
837 }
838 MachineInstrBuilder
839 MachineIRBuilder::buildAtomicRMWXor(Register OldValRes, Register Addr,
840                                     Register Val, MachineMemOperand &MMO) {
841   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
842                         MMO);
843 }
844 MachineInstrBuilder
845 MachineIRBuilder::buildAtomicRMWMax(Register OldValRes, Register Addr,
846                                     Register Val, MachineMemOperand &MMO) {
847   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
848                         MMO);
849 }
850 MachineInstrBuilder
851 MachineIRBuilder::buildAtomicRMWMin(Register OldValRes, Register Addr,
852                                     Register Val, MachineMemOperand &MMO) {
853   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
854                         MMO);
855 }
856 MachineInstrBuilder
857 MachineIRBuilder::buildAtomicRMWUmax(Register OldValRes, Register Addr,
858                                      Register Val, MachineMemOperand &MMO) {
859   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
860                         MMO);
861 }
862 MachineInstrBuilder
863 MachineIRBuilder::buildAtomicRMWUmin(Register OldValRes, Register Addr,
864                                      Register Val, MachineMemOperand &MMO) {
865   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
866                         MMO);
867 }
868 
869 MachineInstrBuilder
870 MachineIRBuilder::buildAtomicRMWFAdd(
871   const DstOp &OldValRes, const SrcOp &Addr, const SrcOp &Val,
872   MachineMemOperand &MMO) {
873   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FADD, OldValRes, Addr, Val,
874                         MMO);
875 }
876 
877 MachineInstrBuilder
878 MachineIRBuilder::buildAtomicRMWFSub(const DstOp &OldValRes, const SrcOp &Addr, const SrcOp &Val,
879                                      MachineMemOperand &MMO) {
880   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FSUB, OldValRes, Addr, Val,
881                         MMO);
882 }
883 
884 MachineInstrBuilder
885 MachineIRBuilder::buildFence(unsigned Ordering, unsigned Scope) {
886   return buildInstr(TargetOpcode::G_FENCE)
887     .addImm(Ordering)
888     .addImm(Scope);
889 }
890 
891 MachineInstrBuilder
892 MachineIRBuilder::buildBlockAddress(Register Res, const BlockAddress *BA) {
893 #ifndef NDEBUG
894   assert(getMRI()->getType(Res).isPointer() && "invalid res type");
895 #endif
896 
897   return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
898 }
899 
/// Assert-build sanity checks for extend/truncate operations (no-op in
/// release builds). \p IsExtend selects whether the Dst must be strictly
/// wider (extend) or strictly narrower (trunc) than the Src.
void MachineIRBuilder::validateTruncExt(const LLT DstTy, const LLT SrcTy,
                                        bool IsExtend) {
#ifndef NDEBUG
  // Vector <-> vector casts must keep the element count; otherwise both
  // sides must be plain scalars.
  if (DstTy.isVector()) {
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");

  // The bit width must change strictly in the expected direction; a
  // same-size "extend" or "trunc" should be a COPY/bitcast instead.
  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}
918 
/// Assert-build sanity checks for G_SELECT operands (no-op in release
/// builds): result and both value operands must have identical types, and
/// the test operand must be a scalar — or, for a vector select, a vector
/// with the same element count as the values.
void MachineIRBuilder::validateSelectOp(const LLT ResTy, const LLT TstTy,
                                        const LLT Op0Ty, const LLT Op1Ty) {
#ifndef NDEBUG
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
  // Scalar/pointer selects take a scalar condition; vector selects may take
  // either a scalar condition or a per-element vector condition.
  if (ResTy.isScalar() || ResTy.isPointer())
    assert(TstTy.isScalar() && "type mismatch");
  else
    assert((TstTy.isScalar() ||
            (TstTy.isVector() &&
             TstTy.getNumElements() == Op0Ty.getNumElements())) &&
           "type mismatch");
#endif
}
934 
935 MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opc,
936                                                  ArrayRef<DstOp> DstOps,
937                                                  ArrayRef<SrcOp> SrcOps,
938                                                  Optional<unsigned> Flags) {
939   switch (Opc) {
940   default:
941     break;
942   case TargetOpcode::G_SELECT: {
943     assert(DstOps.size() == 1 && "Invalid select");
944     assert(SrcOps.size() == 3 && "Invalid select");
945     validateSelectOp(
946         DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
947         SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
948     break;
949   }
950   case TargetOpcode::G_ADD:
951   case TargetOpcode::G_AND:
952   case TargetOpcode::G_MUL:
953   case TargetOpcode::G_OR:
954   case TargetOpcode::G_SUB:
955   case TargetOpcode::G_XOR:
956   case TargetOpcode::G_UDIV:
957   case TargetOpcode::G_SDIV:
958   case TargetOpcode::G_UREM:
959   case TargetOpcode::G_SREM:
960   case TargetOpcode::G_SMIN:
961   case TargetOpcode::G_SMAX:
962   case TargetOpcode::G_UMIN:
963   case TargetOpcode::G_UMAX:
964   case TargetOpcode::G_UADDSAT:
965   case TargetOpcode::G_SADDSAT:
966   case TargetOpcode::G_USUBSAT:
967   case TargetOpcode::G_SSUBSAT: {
968     // All these are binary ops.
969     assert(DstOps.size() == 1 && "Invalid Dst");
970     assert(SrcOps.size() == 2 && "Invalid Srcs");
971     validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
972                      SrcOps[0].getLLTTy(*getMRI()),
973                      SrcOps[1].getLLTTy(*getMRI()));
974     break;
975   }
976   case TargetOpcode::G_SHL:
977   case TargetOpcode::G_ASHR:
978   case TargetOpcode::G_LSHR: {
979     assert(DstOps.size() == 1 && "Invalid Dst");
980     assert(SrcOps.size() == 2 && "Invalid Srcs");
981     validateShiftOp(DstOps[0].getLLTTy(*getMRI()),
982                     SrcOps[0].getLLTTy(*getMRI()),
983                     SrcOps[1].getLLTTy(*getMRI()));
984     break;
985   }
986   case TargetOpcode::G_SEXT:
987   case TargetOpcode::G_ZEXT:
988   case TargetOpcode::G_ANYEXT:
989     assert(DstOps.size() == 1 && "Invalid Dst");
990     assert(SrcOps.size() == 1 && "Invalid Srcs");
991     validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
992                      SrcOps[0].getLLTTy(*getMRI()), true);
993     break;
994   case TargetOpcode::G_TRUNC:
995   case TargetOpcode::G_FPTRUNC: {
996     assert(DstOps.size() == 1 && "Invalid Dst");
997     assert(SrcOps.size() == 1 && "Invalid Srcs");
998     validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
999                      SrcOps[0].getLLTTy(*getMRI()), false);
1000     break;
1001   }
1002   case TargetOpcode::G_BITCAST: {
1003     assert(DstOps.size() == 1 && "Invalid Dst");
1004     assert(SrcOps.size() == 1 && "Invalid Srcs");
1005     assert(DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1006            SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() && "invalid bitcast");
1007     break;
1008   }
1009   case TargetOpcode::COPY:
1010     assert(DstOps.size() == 1 && "Invalid Dst");
1011     // If the caller wants to add a subreg source it has to be done separately
1012     // so we may not have any SrcOps at this point yet.
1013     break;
1014   case TargetOpcode::G_FCMP:
1015   case TargetOpcode::G_ICMP: {
1016     assert(DstOps.size() == 1 && "Invalid Dst Operands");
1017     assert(SrcOps.size() == 3 && "Invalid Src Operands");
1018     // For F/ICMP, the first src operand is the predicate, followed by
1019     // the two comparands.
1020     assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
1021            "Expecting predicate");
1022     assert([&]() -> bool {
1023       CmpInst::Predicate Pred = SrcOps[0].getPredicate();
1024       return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
1025                                          : CmpInst::isFPPredicate(Pred);
1026     }() && "Invalid predicate");
1027     assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
1028            "Type mismatch");
1029     assert([&]() -> bool {
1030       LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
1031       LLT DstTy = DstOps[0].getLLTTy(*getMRI());
1032       if (Op0Ty.isScalar() || Op0Ty.isPointer())
1033         return DstTy.isScalar();
1034       else
1035         return DstTy.isVector() &&
1036                DstTy.getNumElements() == Op0Ty.getNumElements();
1037     }() && "Type Mismatch");
1038     break;
1039   }
1040   case TargetOpcode::G_UNMERGE_VALUES: {
1041     assert(!DstOps.empty() && "Invalid trivial sequence");
1042     assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
1043     assert(std::all_of(DstOps.begin(), DstOps.end(),
1044                        [&, this](const DstOp &Op) {
1045                          return Op.getLLTTy(*getMRI()) ==
1046                                 DstOps[0].getLLTTy(*getMRI());
1047                        }) &&
1048            "type mismatch in output list");
1049     assert(DstOps.size() * DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1050                SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1051            "input operands do not cover output register");
1052     break;
1053   }
1054   case TargetOpcode::G_MERGE_VALUES: {
1055     assert(!SrcOps.empty() && "invalid trivial sequence");
1056     assert(DstOps.size() == 1 && "Invalid Dst");
1057     assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1058                        [&, this](const SrcOp &Op) {
1059                          return Op.getLLTTy(*getMRI()) ==
1060                                 SrcOps[0].getLLTTy(*getMRI());
1061                        }) &&
1062            "type mismatch in input list");
1063     assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1064                DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1065            "input operands do not cover output register");
1066     if (SrcOps.size() == 1)
1067       return buildCast(DstOps[0], SrcOps[0]);
1068     if (DstOps[0].getLLTTy(*getMRI()).isVector()) {
1069       if (SrcOps[0].getLLTTy(*getMRI()).isVector())
1070         return buildInstr(TargetOpcode::G_CONCAT_VECTORS, DstOps, SrcOps);
1071       return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
1072     }
1073     break;
1074   }
1075   case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
1076     assert(DstOps.size() == 1 && "Invalid Dst size");
1077     assert(SrcOps.size() == 2 && "Invalid Src size");
1078     assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
1079     assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
1080             DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
1081            "Invalid operand type");
1082     assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
1083     assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
1084                DstOps[0].getLLTTy(*getMRI()) &&
1085            "Type mismatch");
1086     break;
1087   }
1088   case TargetOpcode::G_INSERT_VECTOR_ELT: {
1089     assert(DstOps.size() == 1 && "Invalid dst size");
1090     assert(SrcOps.size() == 3 && "Invalid src size");
1091     assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
1092            SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
1093     assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
1094                SrcOps[1].getLLTTy(*getMRI()) &&
1095            "Type mismatch");
1096     assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
1097     assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
1098                SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
1099            "Type mismatch");
1100     break;
1101   }
1102   case TargetOpcode::G_BUILD_VECTOR: {
1103     assert((!SrcOps.empty() || SrcOps.size() < 2) &&
1104            "Must have at least 2 operands");
1105     assert(DstOps.size() == 1 && "Invalid DstOps");
1106     assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
1107            "Res type must be a vector");
1108     assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1109                        [&, this](const SrcOp &Op) {
1110                          return Op.getLLTTy(*getMRI()) ==
1111                                 SrcOps[0].getLLTTy(*getMRI());
1112                        }) &&
1113            "type mismatch in input list");
1114     assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1115                DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1116            "input scalars do not exactly cover the output vector register");
1117     break;
1118   }
1119   case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
1120     assert((!SrcOps.empty() || SrcOps.size() < 2) &&
1121            "Must have at least 2 operands");
1122     assert(DstOps.size() == 1 && "Invalid DstOps");
1123     assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
1124            "Res type must be a vector");
1125     assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1126                        [&, this](const SrcOp &Op) {
1127                          return Op.getLLTTy(*getMRI()) ==
1128                                 SrcOps[0].getLLTTy(*getMRI());
1129                        }) &&
1130            "type mismatch in input list");
1131     if (SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1132         DstOps[0].getLLTTy(*getMRI()).getElementType().getSizeInBits())
1133       return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
1134     break;
1135   }
1136   case TargetOpcode::G_CONCAT_VECTORS: {
1137     assert(DstOps.size() == 1 && "Invalid DstOps");
1138     assert((!SrcOps.empty() || SrcOps.size() < 2) &&
1139            "Must have at least 2 operands");
1140     assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1141                        [&, this](const SrcOp &Op) {
1142                          return (Op.getLLTTy(*getMRI()).isVector() &&
1143                                  Op.getLLTTy(*getMRI()) ==
1144                                      SrcOps[0].getLLTTy(*getMRI()));
1145                        }) &&
1146            "type mismatch in input list");
1147     assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1148                DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1149            "input vectors do not exactly cover the output vector register");
1150     break;
1151   }
1152   case TargetOpcode::G_UADDE: {
1153     assert(DstOps.size() == 2 && "Invalid no of dst operands");
1154     assert(SrcOps.size() == 3 && "Invalid no of src operands");
1155     assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
1156     assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
1157            (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
1158            "Invalid operand");
1159     assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
1160     assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
1161            "type mismatch");
1162     break;
1163   }
1164   }
1165 
1166   auto MIB = buildInstr(Opc);
1167   for (const DstOp &Op : DstOps)
1168     Op.addDefToMIB(*getMRI(), MIB);
1169   for (const SrcOp &Op : SrcOps)
1170     Op.addSrcToMIB(MIB);
1171   if (Flags)
1172     MIB->setFlags(*Flags);
1173   return MIB;
1174 }
1175