//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugInfo.h"

using namespace llvm;

void MachineIRBuilder::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MBB = nullptr;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  State.DL = DebugLoc();
  State.II = MachineBasicBlock::iterator();
  State.Observer = nullptr;
}

void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) {
  State.MBB = &MBB;
  State.II = MBB.end();
  assert(&getMF() == MBB.getParent() &&
         "Basic block is in a different function");
}

void MachineIRBuilder::setInstr(MachineInstr &MI) {
  assert(MI.getParent() && "Instruction is not part of a basic block");
  setMBB(*MI.getParent());
  State.II = MI.getIterator();
}

void MachineIRBuilder::setCSEInfo(GISelCSEInfo *Info) { State.CSEInfo = Info; }

void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator II) {
  assert(MBB.getParent() == &getMF() &&
         "Basic block is in a different function");
  State.MBB = &MBB;
  State.II = II;
}

void MachineIRBuilder::recordInsertion(MachineInstr *InsertedInstr) const {
  if (State.Observer)
    State.Observer->createdInstr(*InsertedInstr);
}

void MachineIRBuilder::setChangeObserver(GISelChangeObserver &Observer) {
  State.Observer = &Observer;
}

void MachineIRBuilder::stopObservingChanges() { State.Observer = nullptr; }
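
// Example (illustrative sketch, not from this file): the usual way client
// code positions a builder before emitting anything, assuming MF and MI name
// an existing MachineFunction and an instruction to insert before:
//   MachineIRBuilder B;
//   B.setMF(MF);    // binds MRI, TII and friends for this function
//   B.setInstr(MI); // insert point: MI's block, immediately before MI
//   B.buildCopy(DstReg, SrcReg); // DstReg/SrcReg are assumed registers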

//------------------------------------------------------------------------------
// Build instruction variants.
//------------------------------------------------------------------------------

MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) {
  return insertInstr(buildInstrNoInsert(Opcode));
}

MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
  MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  recordInsertion(MIB);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildDirectDbgValue(Register Reg, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}

MachineInstrBuilder
MachineIRBuilder::buildIndirectDbgValue(Register Reg, const MDNode *Variable,
                                        const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}

MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
                                                      const MDNode *Variable,
                                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
                                                         const MDNode *Variable,
                                                         const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert %noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(0U);
  }

  return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
  assert(isa<DILabel>(Label) && "not a label");
  assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
         "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_LABEL);

  return MIB.addMetadata(Label);
}
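
// Example (illustrative, names assumed): picking between the DBG_VALUE
// builders above for a variable described by DIVar/DIExpr:
//   B.buildDirectDbgValue(VReg, DIVar, DIExpr);   // value lives in VReg
//   B.buildIndirectDbgValue(VReg, DIVar, DIExpr); // VReg holds its address
//   B.buildFIDbgValue(FrameIdx, DIVar, DIExpr);   // value lives on the stack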

MachineInstrBuilder MachineIRBuilder::buildDynStackAlloc(const DstOp &Res,
                                                         const SrcOp &Size,
                                                         Align Alignment) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "expected ptr dst type");
  auto MIB = buildInstr(TargetOpcode::G_DYN_STACKALLOC);
  Res.addDefToMIB(*getMRI(), MIB);
  Size.addSrcToMIB(MIB);
  MIB.addImm(Alignment.value());
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildFrameIndex(const DstOp &Res,
                                                      int Idx) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  auto MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addFrameIndex(Idx);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildGlobalValue(const DstOp &Res,
                                                       const GlobalValue *GV) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  assert(Res.getLLTTy(*getMRI()).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");

  auto MIB = buildInstr(TargetOpcode::G_GLOBAL_VALUE);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addGlobalAddress(GV);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildJumpTable(const LLT PtrTy,
                                                     unsigned JTI) {
  return buildInstr(TargetOpcode::G_JUMP_TABLE, {PtrTy}, {})
      .addJumpTableIndex(JTI);
}

void MachineIRBuilder::validateBinaryOp(const LLT Res, const LLT Op0,
                                        const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0 && Res == Op1) && "type mismatch");
}

void MachineIRBuilder::validateShiftOp(const LLT Res, const LLT Op0,
                                       const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0) && "type mismatch");
}

MachineInstrBuilder MachineIRBuilder::buildPtrAdd(const DstOp &Res,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1) {
  assert(Res.getLLTTy(*getMRI()).getScalarType().isPointer() &&
         Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
  assert(Op1.getLLTTy(*getMRI()).getScalarType().isScalar() &&
         "invalid offset type");

  return buildInstr(TargetOpcode::G_PTR_ADD, {Res}, {Op0, Op1});
}

Optional<MachineInstrBuilder>
MachineIRBuilder::materializePtrAdd(Register &Res, Register Op0,
                                    const LLT ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");

  if (Value == 0) {
    Res = Op0;
    return None;
  }

  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  auto Cst = buildConstant(ValueTy, Value);
  return buildPtrAdd(Res, Op0, Cst.getReg(0));
}
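
// Example (hypothetical caller sketch): materializing Base + 8 for a 64-bit
// pointer, where Base and B are an assumed register and positioned builder:
//   Register Sum; // must start out invalid; set by the call
//   if (Optional<MachineInstrBuilder> PtrAdd =
//           B.materializePtrAdd(Sum, Base, LLT::scalar(64), 8))
//     ...; // Sum is defined by a fresh G_PTR_ADD
//   // With Value == 0 nothing is emitted and Sum simply aliases Base.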

MachineInstrBuilder MachineIRBuilder::buildMaskLowPtrBits(const DstOp &Res,
                                                          const SrcOp &Op0,
                                                          uint32_t NumBits) {
  LLT PtrTy = Res.getLLTTy(*getMRI());
  LLT MaskTy = LLT::scalar(PtrTy.getSizeInBits());
  Register MaskReg = getMRI()->createGenericVirtualRegister(MaskTy);
  buildConstant(MaskReg, maskTrailingZeros<uint64_t>(NumBits));
  return buildPtrMask(Res, Op0, MaskReg);
}
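
// Example (illustrative): maskTrailingZeros<uint64_t>(N) is all-ones except
// for the low N bits, so with NumBits == 4 the mask is 0xFFFFFFFFFFFFFFF0
// and the resulting G_PTRMASK aligns the pointer down to a 16-byte boundary.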

MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildBrIndirect(Register Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}

MachineInstrBuilder MachineIRBuilder::buildBrJT(Register TablePtr,
                                                unsigned JTI,
                                                Register IndexReg) {
  assert(getMRI()->getType(TablePtr).isPointer() &&
         "Table reg must be a pointer");
  return buildInstr(TargetOpcode::G_BRJT)
      .addUse(TablePtr)
      .addJumpTableIndex(JTI)
      .addUse(IndexReg);
}
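
// Example (illustrative, names assumed): a jump-table dispatch pairs the two
// helpers, with JTI indexing a table created via MachineJumpTableInfo and
// IndexReg holding the zero-based case index:
//   auto TablePtr = B.buildJumpTable(LLT::pointer(0, 64), JTI);
//   B.buildBrJT(TablePtr.getReg(0), JTI, IndexReg);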

MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::COPY, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const ConstantInt &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();
  assert(EltTy.getScalarSizeInBits() == Val.getBitWidth() &&
         "creating constant with the wrong size");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_CONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addCImm(&Val);
    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_CONSTANT);
  Const->setDebugLoc(DebugLoc());
  Res.addDefToMIB(*getMRI(), Const);
  Const.addCImm(&Val);
  return Const;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    int64_t Val) {
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
                               Res.getLLTTy(*getMRI()).getScalarSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const ConstantFP &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();

  assert(APFloat::getSizeInBits(Val.getValueAPF().getSemantics())
         == EltTy.getSizeInBits() &&
         "creating fconstant with the wrong size");

  assert(!Ty.isPointer() && "invalid operand type");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addFPImm(&Val);

    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
  Const->setDebugLoc(DebugLoc());
  Res.addDefToMIB(*getMRI(), Const);
  Const.addFPImm(&Val);
  return Const;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const APInt &Val) {
  ConstantInt *CI = ConstantInt::get(getMF().getFunction().getContext(), Val);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     double Val) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP =
      ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getScalarSizeInBits()));
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const APFloat &Val) {
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP = ConstantFP::get(Ctx, Val);
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildBrCond(Register Tst,
                                                  MachineBasicBlock &Dest) {
  assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildLoad(const DstOp &Res,
                                                const SrcOp &Addr,
                                                MachineMemOperand &MMO) {
  return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
}

MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
                                                     const DstOp &Res,
                                                     const SrcOp &Addr,
                                                     MachineMemOperand &MMO) {
  assert(Res.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(Opcode);
  Res.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildStore(const SrcOp &Val,
                                                 const SrcOp &Addr,
                                                 MachineMemOperand &MMO) {
  assert(Val.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(TargetOpcode::G_STORE);
  Val.addSrcToMIB(MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
                                                  const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
}

unsigned MachineIRBuilder::getBoolExtOp(bool IsVec, bool IsFP) const {
  const auto *TLI = getMF().getSubtarget().getTargetLowering();
  switch (TLI->getBooleanContents(IsVec, IsFP)) {
  case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
    return TargetOpcode::G_SEXT;
  case TargetLoweringBase::ZeroOrOneBooleanContent:
    return TargetOpcode::G_ZEXT;
  default:
    return TargetOpcode::G_ANYEXT;
  }
}
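
// Example (illustrative): the mapping follows the target's boolean contents:
// a true value represented as all-ones (ZeroOrNegativeOne) needs G_SEXT to
// widen an i1 -1 to e.g. an s32 -1, a 0/1 representation needs G_ZEXT, and
// an undefined-high-bits representation can use the cheaper G_ANYEXT.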

MachineInstrBuilder MachineIRBuilder::buildBoolExt(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   bool IsFP) {
  unsigned ExtOp =
      getBoolExtOp(getMRI()->getType(Op.getReg()).isVector(), IsFP);
  return buildInstr(ExtOp, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
                                                      const DstOp &Res,
                                                      const SrcOp &Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(Res.getLLTTy(*getMRI()).isScalar() ||
         Res.getLLTTy(*getMRI()).isVector());
  assert(Res.getLLTTy(*getMRI()).isScalar() ==
         Op.getLLTTy(*getMRI()).isScalar());

  unsigned Opcode = TargetOpcode::COPY;
  if (Res.getLLTTy(*getMRI()).getSizeInBits() >
      Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = ExtOpc;
  else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
           Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = TargetOpcode::G_TRUNC;
  else
    assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));

  return buildInstr(Opcode, Res, Op);
}
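
// Example (illustrative): with ExtOpc == G_SEXT, an s32 source widened to an
// s64 destination emits G_SEXT, an s64 source narrowed to s32 emits G_TRUNC,
// and matching types degrade to a plain COPY.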

MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
                                                         const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
                                                const SrcOp &Src) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());
  if (SrcTy == DstTy)
    return buildCopy(Dst, Src);

  unsigned Opcode;
  if (SrcTy.isPointer() && DstTy.isScalar())
    Opcode = TargetOpcode::G_PTRTOINT;
  else if (DstTy.isPointer() && SrcTy.isScalar())
    Opcode = TargetOpcode::G_INTTOPTR;
  else {
    assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
    Opcode = TargetOpcode::G_BITCAST;
  }

  return buildInstr(Opcode, Dst, Src);
}

MachineInstrBuilder MachineIRBuilder::buildExtract(const DstOp &Dst,
                                                   const SrcOp &Src,
                                                   uint64_t Index) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());

#ifndef NDEBUG
  assert(SrcTy.isValid() && "invalid operand type");
  assert(DstTy.isValid() && "invalid operand type");
  assert(Index + DstTy.getSizeInBits() <= SrcTy.getSizeInBits() &&
         "extracting off end of register");
#endif

  if (DstTy.getSizeInBits() == SrcTy.getSizeInBits()) {
    assert(Index == 0 && "extraction past the end of a register");
    return buildCast(Dst, Src);
  }

  auto Extract = buildInstr(TargetOpcode::G_EXTRACT);
  Dst.addDefToMIB(*getMRI(), Extract);
  Src.addSrcToMIB(Extract);
  Extract.addImm(Index);
  return Extract;
}

void MachineIRBuilder::buildSequence(Register Res, ArrayRef<Register> Ops,
                                     ArrayRef<uint64_t> Indices) {
#ifndef NDEBUG
  assert(Ops.size() == Indices.size() && "incompatible args");
  assert(!Ops.empty() && "invalid trivial sequence");
  assert(llvm::is_sorted(Indices) &&
         "sequence offsets must be in ascending order");

  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  for (auto Op : Ops)
    assert(getMRI()->getType(Op).isValid() && "invalid operand type");
#endif

  LLT ResTy = getMRI()->getType(Res);
  LLT OpTy = getMRI()->getType(Ops[0]);
  unsigned OpSize = OpTy.getSizeInBits();
  bool MaybeMerge = true;
  for (unsigned i = 0; i < Ops.size(); ++i) {
    if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
      MaybeMerge = false;
      break;
    }
  }

  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
    buildMerge(Res, Ops);
    return;
  }

  Register ResIn = getMRI()->createGenericVirtualRegister(ResTy);
  buildUndef(ResIn);

  for (unsigned i = 0; i < Ops.size(); ++i) {
    Register ResOut = i + 1 == Ops.size()
                          ? Res
                          : getMRI()->createGenericVirtualRegister(ResTy);
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
    ResIn = ResOut;
  }
}
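
// Example (illustrative): building an s64 Res from two s32 regs at offsets
// {0, 32} takes the fast path and emits a single G_MERGE_VALUES; two s16
// regs at offsets {0, 32} are not contiguous, so it instead emits a
// G_IMPLICIT_DEF followed by a chain of G_INSERTs, one per source operand.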

MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
}

MachineInstrBuilder MachineIRBuilder::buildMerge(const DstOp &Res,
                                                 ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildMerge(const DstOp &Res,
                             std::initializer_list<SrcOp> Ops) {
  assert(Ops.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, Ops);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(LLT Res,
                                                   const SrcOp &Op) {
  unsigned NumReg =
      Op.getLLTTy(*getMRI()).getSizeInBits() / Res.getSizeInBits();
  SmallVector<Register, 8> TmpVec;
  for (unsigned I = 0; I != NumReg; ++I)
    TmpVec.push_back(getMRI()->createGenericVirtualRegister(Res));
  return buildUnmerge(TmpVec, Op);
}
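
// Example (illustrative): unmerging an s64 operand with Res == s32 creates
// two fresh s32 virtual registers and emits
//   %lo:_(s32), %hi:_(s32) = G_UNMERGE_VALUES %src:_(s64)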

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<Register> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
                                                       ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildSplatVector(const DstOp &Res,
                                                       const SrcOp &Src) {
  SmallVector<SrcOp, 8> TmpVec(Res.getLLTTy(*getMRI()).getNumElements(), Src);
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}
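
// Example (illustrative): with Res typed <4 x s32> and an s32 Src, this emits
// G_BUILD_VECTOR with Src repeated four times, i.e. a splat of Src.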

MachineInstrBuilder
MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
                                        ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildInsert(const DstOp &Res,
                                                  const SrcOp &Src,
                                                  const SrcOp &Op,
                                                  unsigned Index) {
  assert(Index + Op.getLLTTy(*getMRI()).getSizeInBits() <=
             Res.getLLTTy(*getMRI()).getSizeInBits() &&
         "insertion past the end of a register");

  if (Res.getLLTTy(*getMRI()).getSizeInBits() ==
      Op.getLLTTy(*getMRI()).getSizeInBits()) {
    return buildCast(Res, Op);
  }

  return buildInstr(TargetOpcode::G_INSERT, Res, {Src, Op, uint64_t(Index)});
}

MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     ArrayRef<Register> ResultRegs,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (unsigned ResultReg : ResultRegs)
    MIB.addDef(ResultReg);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     ArrayRef<DstOp> Results,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (DstOp Result : Results)
    Result.addDefToMIB(*getMRI(), MIB);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
                                                 const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildFPTrunc(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
}

MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1,
                                                Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1}, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildSelect(const DstOp &Res,
                                                  const SrcOp &Tst,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1,
                                                  Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1}, Flags);
}

MachineInstrBuilder
MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
                                           const SrcOp &Elt, const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
}

MachineInstrBuilder
MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
                                            const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
}

MachineInstrBuilder MachineIRBuilder::buildAtomicCmpXchgWithSuccess(
    Register OldValRes, Register SuccessRes, Register Addr, Register CmpVal,
    Register NewVal, MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT SuccessResTy = getMRI()->getType(SuccessRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(SuccessResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
      .addDef(OldValRes)
      .addDef(SuccessRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicCmpXchg(Register OldValRes, Register Addr,
                                     Register CmpVal, Register NewVal,
                                     MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(
  unsigned Opcode, const DstOp &OldValRes,
  const SrcOp &Addr, const SrcOp &Val,
  MachineMemOperand &MMO) {

#ifndef NDEBUG
  LLT OldValResTy = OldValRes.getLLTTy(*getMRI());
  LLT AddrTy = Addr.getLLTTy(*getMRI());
  LLT ValTy = Val.getLLTTy(*getMRI());
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(ValTy.isValid() && "invalid operand type");
  assert(OldValResTy == ValTy && "type mismatch");
  assert(MMO.isAtomic() && "not atomic mem operand");
#endif

  auto MIB = buildInstr(Opcode);
  OldValRes.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  Val.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXchg(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAdd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWSub(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAnd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWNand(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(Register OldValRes,
                                                       Register Addr,
                                                       Register Val,
                                                       MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXor(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMax(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMin(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmax(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmin(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
                        MMO);
}
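
// Example (hypothetical sketch): an atomic fetch-add through AddrReg, where
// MF, PtrInfo, B and the registers are assumed to exist; the MMO must carry
// an atomic ordering or buildAtomicRMW's assertion fires:
//   MachineMemOperand *MMO = MF.getMachineMemOperand(
//       PtrInfo, MachineMemOperand::MOLoad | MachineMemOperand::MOStore, 8,
//       Align(8), AAMDNodes(), nullptr, SyncScope::System,
//       AtomicOrdering::Monotonic);
//   B.buildAtomicRMWAdd(OldReg, AddrReg, ValReg, *MMO);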

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFAdd(
  const DstOp &OldValRes, const SrcOp &Addr, const SrcOp &Val,
  MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FADD, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFSub(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FSUB, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildFence(unsigned Ordering, unsigned Scope) {
  return buildInstr(TargetOpcode::G_FENCE)
    .addImm(Ordering)
    .addImm(Scope);
}

MachineInstrBuilder
MachineIRBuilder::buildBlockAddress(Register Res, const BlockAddress *BA) {
#ifndef NDEBUG
  assert(getMRI()->getType(Res).isPointer() && "invalid res type");
#endif

  return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
}

void MachineIRBuilder::validateTruncExt(const LLT DstTy, const LLT SrcTy,
                                        bool IsExtend) {
#ifndef NDEBUG
  if (DstTy.isVector()) {
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");

  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}

void MachineIRBuilder::validateSelectOp(const LLT ResTy, const LLT TstTy,
                                        const LLT Op0Ty, const LLT Op1Ty) {
#ifndef NDEBUG
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
  if (ResTy.isScalar() || ResTy.isPointer())
    assert(TstTy.isScalar() && "type mismatch");
  else
    assert((TstTy.isScalar() ||
            (TstTy.isVector() &&
             TstTy.getNumElements() == Op0Ty.getNumElements())) &&
           "type mismatch");
#endif
}

MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opc,
                                                 ArrayRef<DstOp> DstOps,
                                                 ArrayRef<SrcOp> SrcOps,
                                                 Optional<unsigned> Flags) {
  switch (Opc) {
  default:
    break;
  case TargetOpcode::G_SELECT: {
    assert(DstOps.size() == 1 && "Invalid select");
    assert(SrcOps.size() == 3 && "Invalid select");
    validateSelectOp(
        DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
        SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_ADD:
  case TargetOpcode::G_AND:
  case TargetOpcode::G_MUL:
  case TargetOpcode::G_OR:
  case TargetOpcode::G_SUB:
  case TargetOpcode::G_XOR:
  case TargetOpcode::G_UDIV:
  case TargetOpcode::G_SDIV:
  case TargetOpcode::G_UREM:
  case TargetOpcode::G_SREM:
  case TargetOpcode::G_SMIN:
  case TargetOpcode::G_SMAX:
  case TargetOpcode::G_UMIN:
  case TargetOpcode::G_UMAX:
  case TargetOpcode::G_UADDSAT:
  case TargetOpcode::G_SADDSAT:
  case TargetOpcode::G_USUBSAT:
  case TargetOpcode::G_SSUBSAT: {
    // All these are binary ops.
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()),
                     SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SHL:
  case TargetOpcode::G_ASHR:
  case TargetOpcode::G_LSHR: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateShiftOp(DstOps[0].getLLTTy(*getMRI()),
                    SrcOps[0].getLLTTy(*getMRI()),
                    SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SEXT:
  case TargetOpcode::G_ZEXT:
  case TargetOpcode::G_ANYEXT:
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), true);
    break;
  case TargetOpcode::G_TRUNC:
  case TargetOpcode::G_FPTRUNC: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), false);
    break;
  }
  case TargetOpcode::G_BITCAST: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    assert(DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
           SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() && "invalid bitcast");
    break;
  }
  case TargetOpcode::COPY:
    assert(DstOps.size() == 1 && "Invalid Dst");
    // If the caller wants to add a subreg source it has to be done separately
    // so we may not have any SrcOps at this point yet.
    break;
  case TargetOpcode::G_FCMP:
  case TargetOpcode::G_ICMP: {
    assert(DstOps.size() == 1 && "Invalid Dst Operands");
    assert(SrcOps.size() == 3 && "Invalid Src Operands");
    // For F/ICMP, the first src operand is the predicate, followed by
    // the two comparands.
    assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
           "Expecting predicate");
    assert([&]() -> bool {
      CmpInst::Predicate Pred = SrcOps[0].getPredicate();
      return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
                                         : CmpInst::isFPPredicate(Pred);
    }() && "Invalid predicate");
    assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert([&]() -> bool {
      LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
      LLT DstTy = DstOps[0].getLLTTy(*getMRI());
      if (Op0Ty.isScalar() || Op0Ty.isPointer())
        return DstTy.isScalar();
      else
        return DstTy.isVector() &&
               DstTy.getNumElements() == Op0Ty.getNumElements();
    }() && "Type Mismatch");
    break;
  }
  case TargetOpcode::G_UNMERGE_VALUES: {
    assert(!DstOps.empty() && "Invalid trivial sequence");
    assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
    assert(std::all_of(DstOps.begin(), DstOps.end(),
                       [&, this](const DstOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                DstOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in output list");
    assert(DstOps.size() * DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    break;
  }
  case TargetOpcode::G_MERGE_VALUES: {
    assert(!SrcOps.empty() && "invalid trivial sequence");
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    if (SrcOps.size() == 1)
      return buildCast(DstOps[0], SrcOps[0]);
    if (DstOps[0].getLLTTy(*getMRI()).isVector()) {
      if (SrcOps[0].getLLTTy(*getMRI()).isVector())
        return buildInstr(TargetOpcode::G_CONCAT_VECTORS, DstOps, SrcOps);
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    }
    break;
  }
  case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid Dst size");
    assert(SrcOps.size() == 2 && "Invalid Src size");
    assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
            DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
           "Invalid operand type");
    assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
    assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
               DstOps[0].getLLTTy(*getMRI()) &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_INSERT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid dst size");
    assert(SrcOps.size() == 3 && "Invalid src size");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
               SrcOps[1].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
    assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
               SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input scalars do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    if (SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
        DstOps[0].getLLTTy(*getMRI()).getElementType().getSizeInBits())
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    break;
  }
  case TargetOpcode::G_CONCAT_VECTORS: {
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return (Op.getLLTTy(*getMRI()).isVector() &&
                                 Op.getLLTTy(*getMRI()) ==
                                     SrcOps[0].getLLTTy(*getMRI()));
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input vectors do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_UADDE: {
    assert(DstOps.size() == 2 && "Invalid no of dst operands");
    assert(SrcOps.size() == 3 && "Invalid no of src operands");
    assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
           (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
           "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "type mismatch");
    break;
  }
  }

  auto MIB = buildInstr(Opc);
  for (const DstOp &Op : DstOps)
    Op.addDefToMIB(*getMRI(), MIB);
  for (const SrcOp &Op : SrcOps)
    Op.addSrcToMIB(MIB);
  if (Flags)
    MIB->setFlags(*Flags);
  return MIB;
}
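
// Example (illustrative): the generic form the typed helpers above funnel
// into, here emitting an add with the no-signed-wrap flag set; DstReg, LHS
// and RHS are assumed registers of matching scalar type:
//   auto Add = B.buildInstr(TargetOpcode::G_ADD, {DstReg}, {LHS, RHS},
//                           MachineInstr::NoSWrap);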