//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugInfo.h"

using namespace llvm;

void MachineIRBuilder::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MBB = nullptr;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  State.DL = DebugLoc();
  State.II = MachineBasicBlock::iterator();
  State.Observer = nullptr;
}

void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) {
  State.MBB = &MBB;
  State.II = MBB.end();
  assert(&getMF() == MBB.getParent() &&
         "Basic block is in a different function");
}

void MachineIRBuilder::setInstr(MachineInstr &MI) {
  assert(MI.getParent() && "Instruction is not part of a basic block");
  setMBB(*MI.getParent());
  State.II = MI.getIterator();
}

void MachineIRBuilder::setCSEInfo(GISelCSEInfo *Info) { State.CSEInfo = Info; }

void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator II) {
  assert(MBB.getParent() == &getMF() &&
         "Basic block is in a different function");
  State.MBB = &MBB;
  State.II = II;
}

void MachineIRBuilder::recordInsertion(MachineInstr *InsertedInstr) const {
  if (State.Observer)
    State.Observer->createdInstr(*InsertedInstr);
}

void MachineIRBuilder::setChangeObserver(GISelChangeObserver &Observer) {
  State.Observer = &Observer;
}

void MachineIRBuilder::stopObservingChanges() { State.Observer = nullptr; }

//------------------------------------------------------------------------------
// Build instruction variants.
//------------------------------------------------------------------------------

MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) {
  return insertInstr(buildInstrNoInsert(Opcode));
}

MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
  MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  recordInsertion(MIB);
  return MIB;
}

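// The DBG_VALUE builders below mirror the forms an IR-level llvm.dbg.value
// can take: a value live in a register (direct or indirect), a frame index,
// or a constant. Each one checks that the variable and expression metadata
// are well formed for the current debug location.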
MachineInstrBuilder
MachineIRBuilder::buildDirectDbgValue(unsigned Reg, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}

MachineInstrBuilder
MachineIRBuilder::buildIndirectDbgValue(unsigned Reg, const MDNode *Variable,
                                        const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}

MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
                                                      const MDNode *Variable,
                                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}

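// Constants are encoded by operand kind: integers wider than 64 bits as a
// CImm, narrower integers as a plain immediate, floats as an FPImm. Any
// other constant is dropped and recorded as a %noreg operand.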
MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
                                                         const MDNode *Variable,
                                                         const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert %noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(0U);
  }

  return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
  assert(isa<DILabel>(Label) && "not a label");
  assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
         "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_LABEL);

  return MIB.addMetadata(Label);
}

MachineInstrBuilder MachineIRBuilder::buildFrameIndex(unsigned Res, int Idx) {
  assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
  return buildInstr(TargetOpcode::G_FRAME_INDEX)
      .addDef(Res)
      .addFrameIndex(Idx);
}

MachineInstrBuilder MachineIRBuilder::buildGlobalValue(unsigned Res,
                                                       const GlobalValue *GV) {
  assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
  assert(getMRI()->getType(Res).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");

  return buildInstr(TargetOpcode::G_GLOBAL_VALUE)
      .addDef(Res)
      .addGlobalAddress(GV);
}

void MachineIRBuilder::validateBinaryOp(const LLT &Res, const LLT &Op0,
                                        const LLT &Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0 && Res == Op1) && "type mismatch");
}

void MachineIRBuilder::validateShiftOp(const LLT &Res, const LLT &Op0,
                                       const LLT &Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0) && "type mismatch");
}

MachineInstrBuilder MachineIRBuilder::buildGEP(unsigned Res, unsigned Op0,
                                               unsigned Op1) {
  assert(getMRI()->getType(Res).isPointer() &&
         getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");
  assert(getMRI()->getType(Op1).isScalar() && "invalid offset type");

  return buildInstr(TargetOpcode::G_GEP)
      .addDef(Res)
      .addUse(Op0)
      .addUse(Op1);
}

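// materializeGEP emits a G_GEP only when the offset is known to be non-zero;
// a zero offset simply reuses the input register and emits nothing, which is
// why the result register is an out-parameter rather than a return value.
// A minimal illustrative use (the names here are hypothetical):
//
//   unsigned PtrPlusEight = 0;
//   if (auto MIB = Builder.materializeGEP(PtrPlusEight, BasePtr,
//                                         LLT::scalar(64), 8))
//     ...inspect the freshly built G_GEP via *MIB...
//   // PtrPlusEight now names either BasePtr (offset 0) or the G_GEP result.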
Optional<MachineInstrBuilder>
MachineIRBuilder::materializeGEP(unsigned &Res, unsigned Op0,
                                 const LLT &ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");

  if (Value == 0) {
    Res = Op0;
    return None;
  }

  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  unsigned TmpReg = getMRI()->createGenericVirtualRegister(ValueTy);

  buildConstant(TmpReg, Value);
  return buildGEP(Res, Op0, TmpReg);
}

MachineInstrBuilder MachineIRBuilder::buildPtrMask(unsigned Res, unsigned Op0,
                                                   uint32_t NumBits) {
  assert(getMRI()->getType(Res).isPointer() &&
         getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");

  return buildInstr(TargetOpcode::G_PTR_MASK)
      .addDef(Res)
      .addUse(Op0)
      .addImm(NumBits);
}

MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildBrIndirect(unsigned Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}

MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::COPY, Res, Op);
}

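// For a vector destination, buildConstant/buildFConstant first build a
// scalar G_CONSTANT/G_FCONSTANT of the element type and then splat it with
// G_BUILD_VECTOR; scalar destinations take the constant directly.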
MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const ConstantInt &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();
  assert(EltTy.getScalarSizeInBits() == Val.getBitWidth() &&
         "creating constant with the wrong size");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_CONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addCImm(&Val);
    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_CONSTANT);
  Res.addDefToMIB(*getMRI(), Const);
  Const.addCImm(&Val);
  return Const;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    int64_t Val) {
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
                               Res.getLLTTy(*getMRI()).getScalarSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const ConstantFP &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();

  assert(APFloat::getSizeInBits(Val.getValueAPF().getSemantics()) ==
             EltTy.getSizeInBits() &&
         "creating fconstant with the wrong size");

  assert(!Ty.isPointer() && "invalid operand type");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addFPImm(&Val);

    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
  Res.addDefToMIB(*getMRI(), Const);
  Const.addFPImm(&Val);
  return Const;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const APInt &Val) {
  ConstantInt *CI = ConstantInt::get(getMF().getFunction().getContext(), Val);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     double Val) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP = ConstantFP::get(
      Ctx, getAPFloatFromSize(Val, DstTy.getScalarSizeInBits()));
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildBrCond(unsigned Tst,
                                                  MachineBasicBlock &Dest) {
  assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildLoad(unsigned Res, unsigned Addr,
                                                MachineMemOperand &MMO) {
  return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
}

MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
                                                     unsigned Res,
                                                     unsigned Addr,
                                                     MachineMemOperand &MMO) {
  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");

  return buildInstr(Opcode)
      .addDef(Res)
      .addUse(Addr)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildStore(unsigned Val, unsigned Addr,
                                                 MachineMemOperand &MMO) {
  assert(getMRI()->getType(Val).isValid() && "invalid operand type");
  assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");

  return buildInstr(TargetOpcode::G_STORE)
      .addUse(Val)
      .addUse(Addr)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildUAdde(const DstOp &Res,
                                                 const DstOp &CarryOut,
                                                 const SrcOp &Op0,
                                                 const SrcOp &Op1,
                                                 const SrcOp &CarryIn) {
  return buildInstr(TargetOpcode::G_UADDE, {Res, CarryOut},
                    {Op0, Op1, CarryIn});
}

MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
                                                  const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
}

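// The right extension for a boolean depends on how the target defines its
// boolean contents: sign extension when true is all-ones
// (ZeroOrNegativeOne), zero extension when true is 1 (ZeroOrOne), and
// any-extend when the upper bits are unspecified.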
unsigned MachineIRBuilder::getBoolExtOp(bool IsVec, bool IsFP) const {
  const auto *TLI = getMF().getSubtarget().getTargetLowering();
  switch (TLI->getBooleanContents(IsVec, IsFP)) {
  case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
    return TargetOpcode::G_SEXT;
  case TargetLoweringBase::ZeroOrOneBooleanContent:
    return TargetOpcode::G_ZEXT;
  default:
    return TargetOpcode::G_ANYEXT;
  }
}

MachineInstrBuilder MachineIRBuilder::buildBoolExt(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   bool IsFP) {
  unsigned ExtOp =
      getBoolExtOp(getMRI()->getType(Op.getReg()).isVector(), IsFP);
  return buildInstr(ExtOp, Res, Op);
}

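// buildExtOrTrunc compares the destination and source sizes and emits
// whichever of the requested extension, a G_TRUNC, or a plain COPY fits.
// Illustratively: s32 -> s64 with G_ZEXT emits a G_ZEXT, s64 -> s32 emits a
// G_TRUNC, and s32 -> s32 degrades to a COPY.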
MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
                                                      const DstOp &Res,
                                                      const SrcOp &Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(Res.getLLTTy(*getMRI()).isScalar() ||
         Res.getLLTTy(*getMRI()).isVector());
  assert(Res.getLLTTy(*getMRI()).isScalar() ==
         Op.getLLTTy(*getMRI()).isScalar());

  unsigned Opcode = TargetOpcode::COPY;
  if (Res.getLLTTy(*getMRI()).getSizeInBits() >
      Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = ExtOpc;
  else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
           Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = TargetOpcode::G_TRUNC;
  else
    assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));

  return buildInstr(Opcode, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
                                                         const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}

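// buildCast picks the opcode from the operand kinds: pointer -> scalar is
// G_PTRTOINT, scalar -> pointer is G_INTTOPTR, anything else falls back to
// G_BITCAST, and identical types degrade to a COPY.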
MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
                                                const SrcOp &Src) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());
  if (SrcTy == DstTy)
    return buildCopy(Dst, Src);

  unsigned Opcode;
  if (SrcTy.isPointer() && DstTy.isScalar())
    Opcode = TargetOpcode::G_PTRTOINT;
  else if (DstTy.isPointer() && SrcTy.isScalar())
    Opcode = TargetOpcode::G_INTTOPTR;
  else {
    assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
    Opcode = TargetOpcode::G_BITCAST;
  }

  return buildInstr(Opcode, Dst, Src);
}

MachineInstrBuilder MachineIRBuilder::buildExtract(unsigned Res, unsigned Src,
                                                   uint64_t Index) {
#ifndef NDEBUG
  assert(getMRI()->getType(Src).isValid() && "invalid operand type");
  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  assert(Index + getMRI()->getType(Res).getSizeInBits() <=
             getMRI()->getType(Src).getSizeInBits() &&
         "extracting off end of register");
#endif

  if (getMRI()->getType(Res).getSizeInBits() ==
      getMRI()->getType(Src).getSizeInBits()) {
    assert(Index == 0 && "extraction past the end of a register");
    return buildCast(Res, Src);
  }

  return buildInstr(TargetOpcode::G_EXTRACT)
      .addDef(Res)
      .addUse(Src)
      .addImm(Index);
}

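// If the operands are uniformly typed and their indices tile the result
// exactly, a single G_MERGE_VALUES suffices; otherwise the sequence is
// assembled by chaining one G_INSERT per operand onto a G_IMPLICIT_DEF.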
void MachineIRBuilder::buildSequence(unsigned Res, ArrayRef<unsigned> Ops,
                                     ArrayRef<uint64_t> Indices) {
#ifndef NDEBUG
  assert(Ops.size() == Indices.size() && "incompatible args");
  assert(!Ops.empty() && "invalid trivial sequence");
  assert(std::is_sorted(Indices.begin(), Indices.end()) &&
         "sequence offsets must be in ascending order");

  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  for (auto Op : Ops)
    assert(getMRI()->getType(Op).isValid() && "invalid operand type");
#endif

  LLT ResTy = getMRI()->getType(Res);
  LLT OpTy = getMRI()->getType(Ops[0]);
  unsigned OpSize = OpTy.getSizeInBits();
  bool MaybeMerge = true;
  for (unsigned i = 0; i < Ops.size(); ++i) {
    if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
      MaybeMerge = false;
      break;
    }
  }

  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
    buildMerge(Res, Ops);
    return;
  }

  unsigned ResIn = getMRI()->createGenericVirtualRegister(ResTy);
  buildUndef(ResIn);

  for (unsigned i = 0; i < Ops.size(); ++i) {
    unsigned ResOut = i + 1 == Ops.size()
                          ? Res
                          : getMRI()->createGenericVirtualRegister(ResTy);
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
    ResIn = ResOut;
  }
}

MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
}

MachineInstrBuilder MachineIRBuilder::buildMerge(const DstOp &Res,
                                                 ArrayRef<unsigned> Ops) {
  // Unfortunately to convert from ArrayRef<unsigned> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<unsigned> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<unsigned> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
                                                       ArrayRef<unsigned> Ops) {
  // Unfortunately to convert from ArrayRef<unsigned> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildSplatVector(const DstOp &Res,
                                                       const SrcOp &Src) {
  SmallVector<SrcOp, 8> TmpVec(Res.getLLTTy(*getMRI()).getNumElements(), Src);
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
                                        ArrayRef<unsigned> Ops) {
  // Unfortunately to convert from ArrayRef<unsigned> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<unsigned> Ops) {
  // Unfortunately to convert from ArrayRef<unsigned> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildInsert(unsigned Res, unsigned Src,
                                                  unsigned Op, unsigned Index) {
  assert(Index + getMRI()->getType(Op).getSizeInBits() <=
             getMRI()->getType(Res).getSizeInBits() &&
         "insertion past the end of a register");

  if (getMRI()->getType(Res).getSizeInBits() ==
      getMRI()->getType(Op).getSizeInBits()) {
    return buildCast(Res, Op);
  }

  return buildInstr(TargetOpcode::G_INSERT)
      .addDef(Res)
      .addUse(Src)
      .addUse(Op)
      .addImm(Index);
}

MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     unsigned Res,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  if (Res)
    MIB.addDef(Res);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
                                                 const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildFPTrunc(const DstOp &Res,
                                                   const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
}

MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1});
}

MachineInstrBuilder MachineIRBuilder::buildSelect(const DstOp &Res,
                                                  const SrcOp &Tst,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1});
}

MachineInstrBuilder
MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
                                           const SrcOp &Elt, const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
}

MachineInstrBuilder
MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
                                            const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
}

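// The atomic builders below only validate register types here; the memory
// ordering and atomicity of each operation travel with the attached
// MachineMemOperand.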
MachineInstrBuilder MachineIRBuilder::buildAtomicCmpXchgWithSuccess(
    unsigned OldValRes, unsigned SuccessRes, unsigned Addr, unsigned CmpVal,
    unsigned NewVal, MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT SuccessResTy = getMRI()->getType(SuccessRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(SuccessResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
      .addDef(OldValRes)
      .addDef(SuccessRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicCmpXchg(unsigned OldValRes, unsigned Addr,
                                     unsigned CmpVal, unsigned NewVal,
                                     MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(unsigned Opcode,
                                                     unsigned OldValRes,
                                                     unsigned Addr,
                                                     unsigned Val,
                                                     MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT ValTy = getMRI()->getType(Val);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(ValTy.isValid() && "invalid operand type");
  assert(OldValResTy == ValTy && "type mismatch");
#endif

  return buildInstr(Opcode)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(Val)
      .addMemOperand(&MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXchg(unsigned OldValRes, unsigned Addr,
                                     unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAdd(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWSub(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAnd(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWNand(unsigned OldValRes, unsigned Addr,
                                     unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(unsigned OldValRes,
                                                       unsigned Addr,
                                                       unsigned Val,
                                                       MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXor(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMax(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMin(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmax(unsigned OldValRes, unsigned Addr,
                                     unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmin(unsigned OldValRes, unsigned Addr,
                                     unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildBlockAddress(unsigned Res, const BlockAddress *BA) {
#ifndef NDEBUG
  assert(getMRI()->getType(Res).isPointer() && "invalid res type");
#endif

  return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
}

void MachineIRBuilder::validateTruncExt(const LLT &DstTy, const LLT &SrcTy,
                                        bool IsExtend) {
#ifndef NDEBUG
  if (DstTy.isVector()) {
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");

  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}

void MachineIRBuilder::validateSelectOp(const LLT &ResTy, const LLT &TstTy,
                                        const LLT &Op0Ty, const LLT &Op1Ty) {
#ifndef NDEBUG
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
  if (ResTy.isScalar() || ResTy.isPointer())
    assert(TstTy.isScalar() && "type mismatch");
  else
    assert((TstTy.isScalar() ||
            (TstTy.isVector() &&
             TstTy.getNumElements() == Op0Ty.getNumElements())) &&
           "type mismatch");
#endif
}

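// This generic buildInstr overload is the funnel for most of the builders
// above: it validates per-opcode invariants in debug builds and folds a few
// trivial forms early (a single-source G_MERGE_VALUES becomes a cast, a
// vector-typed one becomes G_CONCAT_VECTORS, and a G_BUILD_VECTOR_TRUNC
// whose sources already match the element size becomes G_BUILD_VECTOR).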
MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opc,
                                                 ArrayRef<DstOp> DstOps,
                                                 ArrayRef<SrcOp> SrcOps,
                                                 Optional<unsigned> Flags) {
  switch (Opc) {
  default:
    break;
  case TargetOpcode::G_SELECT: {
    assert(DstOps.size() == 1 && "Invalid select");
    assert(SrcOps.size() == 3 && "Invalid select");
    validateSelectOp(
        DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
        SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_ADD:
  case TargetOpcode::G_AND:
  case TargetOpcode::G_MUL:
  case TargetOpcode::G_OR:
  case TargetOpcode::G_SUB:
  case TargetOpcode::G_XOR:
  case TargetOpcode::G_UDIV:
  case TargetOpcode::G_SDIV:
  case TargetOpcode::G_UREM:
  case TargetOpcode::G_SREM: {
    // All these are binary ops.
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()),
                     SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SHL:
  case TargetOpcode::G_ASHR:
  case TargetOpcode::G_LSHR: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateShiftOp(DstOps[0].getLLTTy(*getMRI()),
                    SrcOps[0].getLLTTy(*getMRI()),
                    SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SEXT:
  case TargetOpcode::G_ZEXT:
  case TargetOpcode::G_ANYEXT:
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), true);
    break;
  case TargetOpcode::G_TRUNC:
  case TargetOpcode::G_FPTRUNC: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), false);
    break;
  }
  case TargetOpcode::COPY:
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    assert(DstOps[0].getLLTTy(*getMRI()) == LLT() ||
           SrcOps[0].getLLTTy(*getMRI()) == LLT() ||
           DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI()));
    break;
  case TargetOpcode::G_FCMP:
  case TargetOpcode::G_ICMP: {
    assert(DstOps.size() == 1 && "Invalid Dst Operands");
    assert(SrcOps.size() == 3 && "Invalid Src Operands");
    // For F/ICMP, the first src operand is the predicate, followed by
    // the two comparands.
    assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
           "Expecting predicate");
    assert([&]() -> bool {
      CmpInst::Predicate Pred = SrcOps[0].getPredicate();
      return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
                                         : CmpInst::isFPPredicate(Pred);
    }() && "Invalid predicate");
    assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert([&]() -> bool {
      LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
      LLT DstTy = DstOps[0].getLLTTy(*getMRI());
      if (Op0Ty.isScalar() || Op0Ty.isPointer())
        return DstTy.isScalar();
      else
        return DstTy.isVector() &&
               DstTy.getNumElements() == Op0Ty.getNumElements();
    }() && "Type Mismatch");
    break;
  }
  case TargetOpcode::G_UNMERGE_VALUES: {
    assert(!DstOps.empty() && "Invalid trivial sequence");
    assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
    assert(std::all_of(DstOps.begin(), DstOps.end(),
                       [&, this](const DstOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                DstOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in output list");
    assert(DstOps.size() * DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "output operands do not cover input register");
    break;
  }
  case TargetOpcode::G_MERGE_VALUES: {
    assert(!SrcOps.empty() && "invalid trivial sequence");
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    if (SrcOps.size() == 1)
      return buildCast(DstOps[0], SrcOps[0]);
    if (DstOps[0].getLLTTy(*getMRI()).isVector())
      return buildInstr(TargetOpcode::G_CONCAT_VECTORS, DstOps, SrcOps);
    break;
  }
  case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid Dst size");
    assert(SrcOps.size() == 2 && "Invalid Src size");
    assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
            DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
           "Invalid operand type");
    assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
    assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
               DstOps[0].getLLTTy(*getMRI()) &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_INSERT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid dst size");
    assert(SrcOps.size() == 3 && "Invalid src size");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
               SrcOps[1].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
    assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
               SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input scalars do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    if (SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
        DstOps[0].getLLTTy(*getMRI()).getElementType().getSizeInBits())
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    break;
  }
  case TargetOpcode::G_CONCAT_VECTORS: {
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return (Op.getLLTTy(*getMRI()).isVector() &&
                                 Op.getLLTTy(*getMRI()) ==
                                     SrcOps[0].getLLTTy(*getMRI()));
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input vectors do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_UADDE: {
    assert(DstOps.size() == 2 && "Invalid no of dst operands");
    assert(SrcOps.size() == 3 && "Invalid no of src operands");
    assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
           (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
           "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "type mismatch");
    break;
  }
  }

  auto MIB = buildInstr(Opc);
  for (const DstOp &Op : DstOps)
    Op.addDefToMIB(*getMRI(), MIB);
  for (const SrcOp &Op : SrcOps)
    Op.addSrcToMIB(MIB);
  if (Flags)
    MIB->setFlags(*Flags);
  return MIB;
}