1 //===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 /// \file
9 /// This file implements the MachineIRBuilder class.
10 //===----------------------------------------------------------------------===//
11 #include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
12 #include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"
13 
14 #include "llvm/CodeGen/MachineFunction.h"
15 #include "llvm/CodeGen/MachineInstr.h"
16 #include "llvm/CodeGen/MachineInstrBuilder.h"
17 #include "llvm/CodeGen/MachineRegisterInfo.h"
18 #include "llvm/CodeGen/TargetInstrInfo.h"
19 #include "llvm/CodeGen/TargetLowering.h"
20 #include "llvm/CodeGen/TargetOpcodes.h"
21 #include "llvm/CodeGen/TargetSubtargetInfo.h"
22 #include "llvm/IR/DebugInfo.h"
23 
24 using namespace llvm;
25 
26 void MachineIRBuilder::setMF(MachineFunction &MF) {
27   State.MF = &MF;
28   State.MBB = nullptr;
29   State.MRI = &MF.getRegInfo();
30   State.TII = MF.getSubtarget().getInstrInfo();
31   State.DL = DebugLoc();
32   State.II = MachineBasicBlock::iterator();
33   State.Observer = nullptr;
34 }
35 
36 void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) {
37   State.MBB = &MBB;
38   State.II = MBB.end();
39   assert(&getMF() == MBB.getParent() &&
40          "Basic block is in a different function");
41 }
42 
43 void MachineIRBuilder::setInstr(MachineInstr &MI) {
44   assert(MI.getParent() && "Instruction is not part of a basic block");
45   setMBB(*MI.getParent());
46   State.II = MI.getIterator();
47 }
48 
49 void MachineIRBuilder::setCSEInfo(GISelCSEInfo *Info) { State.CSEInfo = Info; }
50 
51 void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB,
52                                    MachineBasicBlock::iterator II) {
53   assert(MBB.getParent() == &getMF() &&
54          "Basic block is in a different function");
55   State.MBB = &MBB;
56   State.II = II;
57 }
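
// Usage sketch (illustrative only): a client pass typically binds the builder
// to a function and picks an insertion point before emitting anything. The
// MF, MBB and MI values below are assumed to be provided by the caller.
//   MachineIRBuilder MIRBuilder;
//   MIRBuilder.setMF(MF);                     // binds MRI and TII as well
//   MIRBuilder.setMBB(MBB);                   // insert at the end of MBB
//   MIRBuilder.setInstr(MI);                  // or insert right before MI
//   MIRBuilder.setInsertPt(MBB, MBB.begin()); // or at an explicit iterator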
58 
59 void MachineIRBuilder::recordInsertion(MachineInstr *InsertedInstr) const {
60   if (State.Observer)
61     State.Observer->createdInstr(*InsertedInstr);
62 }
63 
64 void MachineIRBuilder::setChangeObserver(GISelChangeObserver &Observer) {
65   State.Observer = &Observer;
66 }
67 
68 void MachineIRBuilder::stopObservingChanges() { State.Observer = nullptr; }
69 
70 //------------------------------------------------------------------------------
71 // Build instruction variants.
72 //------------------------------------------------------------------------------
73 
74 MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) {
75   return insertInstr(buildInstrNoInsert(Opcode));
76 }
77 
78 MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
79   MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
80   return MIB;
81 }
82 
83 MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
84   getMBB().insert(getInsertPt(), MIB);
85   recordInsertion(MIB);
86   return MIB;
87 }
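
// Usage sketch: buildInstrNoInsert/insertInstr separate creation from
// insertion, which helps when operands are only known later. The opcode and
// the DstReg/LHSReg/RHSReg registers below are illustrative assumptions.
//   auto MIB = MIRBuilder.buildInstrNoInsert(TargetOpcode::G_ADD);
//   MIB.addDef(DstReg).addUse(LHSReg).addUse(RHSReg);
//   MIRBuilder.insertInstr(MIB); // placed at the current insertion point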
88 
89 MachineInstrBuilder
90 MachineIRBuilder::buildDirectDbgValue(Register Reg, const MDNode *Variable,
91                                       const MDNode *Expr) {
92   assert(isa<DILocalVariable>(Variable) && "not a variable");
93   assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
94   assert(
95       cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
96       "Expected inlined-at fields to agree");
97   return insertInstr(BuildMI(getMF(), getDL(),
98                              getTII().get(TargetOpcode::DBG_VALUE),
99                              /*IsIndirect*/ false, Reg, Variable, Expr));
100 }
101 
102 MachineInstrBuilder
103 MachineIRBuilder::buildIndirectDbgValue(Register Reg, const MDNode *Variable,
104                                         const MDNode *Expr) {
105   assert(isa<DILocalVariable>(Variable) && "not a variable");
106   assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
107   assert(
108       cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
109       "Expected inlined-at fields to agree");
110   return insertInstr(BuildMI(getMF(), getDL(),
111                              getTII().get(TargetOpcode::DBG_VALUE),
112                              /*IsIndirect*/ true, Reg, Variable, Expr));
113 }
114 
115 MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
116                                                       const MDNode *Variable,
117                                                       const MDNode *Expr) {
118   assert(isa<DILocalVariable>(Variable) && "not a variable");
119   assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
120   assert(
121       cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
122       "Expected inlined-at fields to agree");
123   return buildInstr(TargetOpcode::DBG_VALUE)
124       .addFrameIndex(FI)
125       .addImm(0)
126       .addMetadata(Variable)
127       .addMetadata(Expr);
128 }
129 
130 MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
131                                                          const MDNode *Variable,
132                                                          const MDNode *Expr) {
133   assert(isa<DILocalVariable>(Variable) && "not a variable");
134   assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
135   assert(
136       cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
137       "Expected inlined-at fields to agree");
138   auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
139   if (auto *CI = dyn_cast<ConstantInt>(&C)) {
140     if (CI->getBitWidth() > 64)
141       MIB.addCImm(CI);
142     else
143       MIB.addImm(CI->getZExtValue());
144   } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
145     MIB.addFPImm(CFP);
146   } else {
147     // Insert %noreg if we didn't find a usable constant and had to drop it.
148     MIB.addReg(0U);
149   }
150 
151   return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
152 }
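
// Usage sketch: emitting DBG_VALUEs for a source variable. Variable and Expr
// are assumed to be the DILocalVariable/DIExpression from the corresponding
// debug intrinsic; ValReg and FrameIdx are caller-provided.
//   MIRBuilder.buildDirectDbgValue(ValReg, Variable, Expr);
//   MIRBuilder.buildFIDbgValue(FrameIdx, Variable, Expr); // value on the stack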
153 
154 MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
155   assert(isa<DILabel>(Label) && "not a label");
156   assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
157          "Expected inlined-at fields to agree");
158   auto MIB = buildInstr(TargetOpcode::DBG_LABEL);
159 
160   return MIB.addMetadata(Label);
161 }
162 
163 MachineInstrBuilder MachineIRBuilder::buildFrameIndex(const DstOp &Res,
164                                                       int Idx) {
165   assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
166   auto MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
167   Res.addDefToMIB(*getMRI(), MIB);
168   MIB.addFrameIndex(Idx);
169   return MIB;
170 }
171 
172 MachineInstrBuilder MachineIRBuilder::buildGlobalValue(const DstOp &Res,
173                                                        const GlobalValue *GV) {
174   assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
175   assert(Res.getLLTTy(*getMRI()).getAddressSpace() ==
176              GV->getType()->getAddressSpace() &&
177          "address space mismatch");
178 
179   auto MIB = buildInstr(TargetOpcode::G_GLOBAL_VALUE);
180   Res.addDefToMIB(*getMRI(), MIB);
181   MIB.addGlobalAddress(GV);
182   return MIB;
183 }
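
// Usage sketch: both builders yield pointer-typed values. P0 below assumes a
// 64-bit pointer in address space 0; FrameIdx and GV are caller-provided.
//   LLT P0 = LLT::pointer(0, 64);
//   auto FIAddr = MIRBuilder.buildFrameIndex(P0, FrameIdx);
//   auto GVAddr = MIRBuilder.buildGlobalValue(P0, GV);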
184 
185 MachineInstrBuilder MachineIRBuilder::buildJumpTable(const LLT PtrTy,
186                                                      unsigned JTI) {
187   return buildInstr(TargetOpcode::G_JUMP_TABLE, {PtrTy}, {})
188       .addJumpTableIndex(JTI);
189 }
190 
191 void MachineIRBuilder::validateBinaryOp(const LLT &Res, const LLT &Op0,
192                                         const LLT &Op1) {
193   assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
194   assert((Res == Op0 && Res == Op1) && "type mismatch");
195 }
196 
197 void MachineIRBuilder::validateShiftOp(const LLT &Res, const LLT &Op0,
198                                        const LLT &Op1) {
199   assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
200   assert((Res == Op0) && "type mismatch");
201 }
202 
203 MachineInstrBuilder MachineIRBuilder::buildGEP(const DstOp &Res,
204                                                const SrcOp &Op0,
205                                                const SrcOp &Op1) {
206   assert(Res.getLLTTy(*getMRI()).isPointer() &&
207          Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
208   assert(Op1.getLLTTy(*getMRI()).isScalar() && "invalid offset type");
209 
210   return buildInstr(TargetOpcode::G_GEP, {Res}, {Op0, Op1});
211 }
212 
213 Optional<MachineInstrBuilder>
214 MachineIRBuilder::materializeGEP(Register &Res, Register Op0,
215                                  const LLT &ValueTy, uint64_t Value) {
216   assert(Res == 0 && "Res is a result argument");
217   assert(ValueTy.isScalar() && "invalid offset type");
218 
219   if (Value == 0) {
220     Res = Op0;
221     return None;
222   }
223 
224   Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
225   auto Cst = buildConstant(ValueTy, Value);
226   return buildGEP(Res, Op0, Cst.getReg(0));
227 }
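
// Usage sketch: materializeGEP returns the result register through Res and
// yields None when no instruction is needed (a zero offset). PtrReg and
// Offset below are assumptions; the scalar type is the offset type.
//   Register ResPtr = 0;
//   Optional<MachineInstrBuilder> MIB =
//       MIRBuilder.materializeGEP(ResPtr, PtrReg, LLT::scalar(64), Offset);
//   // MIB is None for a zero offset; ResPtr names the result either way.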
228 
229 MachineInstrBuilder MachineIRBuilder::buildPtrMask(const DstOp &Res,
230                                                    const SrcOp &Op0,
231                                                    uint32_t NumBits) {
232   assert(Res.getLLTTy(*getMRI()).isPointer() &&
233          Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
234 
235   auto MIB = buildInstr(TargetOpcode::G_PTR_MASK);
236   Res.addDefToMIB(*getMRI(), MIB);
237   Op0.addSrcToMIB(MIB);
238   MIB.addImm(NumBits);
239   return MIB;
240 }
241 
242 MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
243   return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
244 }
245 
246 MachineInstrBuilder MachineIRBuilder::buildBrIndirect(Register Tgt) {
247   assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
248   return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
249 }
250 
251 MachineInstrBuilder MachineIRBuilder::buildBrJT(Register TablePtr,
252                                                 unsigned JTI,
253                                                 Register IndexReg) {
254   assert(getMRI()->getType(TablePtr).isPointer() &&
255          "Table reg must be a pointer");
256   return buildInstr(TargetOpcode::G_BRJT)
257       .addUse(TablePtr)
258       .addJumpTableIndex(JTI)
259       .addUse(IndexReg);
260 }
261 
262 MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
263                                                 const SrcOp &Op) {
264   return buildInstr(TargetOpcode::COPY, Res, Op);
265 }
266 
267 MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
268                                                     const ConstantInt &Val) {
269   LLT Ty = Res.getLLTTy(*getMRI());
270   LLT EltTy = Ty.getScalarType();
271   assert(EltTy.getScalarSizeInBits() == Val.getBitWidth() &&
272          "creating constant with the wrong size");
273 
274   if (Ty.isVector()) {
275     auto Const = buildInstr(TargetOpcode::G_CONSTANT)
276                      .addDef(getMRI()->createGenericVirtualRegister(EltTy))
277                      .addCImm(&Val);
278     return buildSplatVector(Res, Const);
279   }
280 
281   auto Const = buildInstr(TargetOpcode::G_CONSTANT);
282   Res.addDefToMIB(*getMRI(), Const);
283   Const.addCImm(&Val);
284   return Const;
285 }
286 
287 MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
288                                                     int64_t Val) {
289   auto IntN = IntegerType::get(getMF().getFunction().getContext(),
290                                Res.getLLTTy(*getMRI()).getScalarSizeInBits());
291   ConstantInt *CI = ConstantInt::get(IntN, Val, true);
292   return buildConstant(Res, *CI);
293 }
294 
295 MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
296                                                      const ConstantFP &Val) {
297   LLT Ty = Res.getLLTTy(*getMRI());
298   LLT EltTy = Ty.getScalarType();
299 
300   assert(APFloat::getSizeInBits(Val.getValueAPF().getSemantics()) ==
301              EltTy.getSizeInBits() &&
302          "creating fconstant with the wrong size");
303 
304   assert(!Ty.isPointer() && "invalid operand type");
305 
306   if (Ty.isVector()) {
307     auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
308                      .addDef(getMRI()->createGenericVirtualRegister(EltTy))
309                      .addFPImm(&Val);
310 
311     return buildSplatVector(Res, Const);
312   }
313 
314   auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
315   Res.addDefToMIB(*getMRI(), Const);
316   Const.addFPImm(&Val);
317   return Const;
318 }
319 
320 MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
321                                                     const APInt &Val) {
322   ConstantInt *CI = ConstantInt::get(getMF().getFunction().getContext(), Val);
323   return buildConstant(Res, *CI);
324 }
325 
326 MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
327                                                      double Val) {
328   LLT DstTy = Res.getLLTTy(*getMRI());
329   auto &Ctx = getMF().getFunction().getContext();
330   auto *CFP =
331       ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getScalarSizeInBits()));
332   return buildFConstant(Res, *CFP);
333 }
334 
335 MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
336                                                      const APFloat &Val) {
337   auto &Ctx = getMF().getFunction().getContext();
338   auto *CFP = ConstantFP::get(Ctx, Val);
339   return buildFConstant(Res, *CFP);
340 }
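
// Usage sketch: constants can be requested directly by LLT; vector
// destinations are splatted through G_BUILD_VECTOR. The types are examples.
//   auto C  = MIRBuilder.buildConstant(LLT::scalar(32), 42);   // G_CONSTANT
//   auto F  = MIRBuilder.buildFConstant(LLT::scalar(64), 1.0); // G_FCONSTANT
//   auto CV = MIRBuilder.buildConstant(LLT::vector(4, 32), 7); // splat vector
//   Register CReg = C.getReg(0);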
341 
342 MachineInstrBuilder MachineIRBuilder::buildBrCond(Register Tst,
343                                                   MachineBasicBlock &Dest) {
344   assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");
345 
346   return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
347 }
348 
349 MachineInstrBuilder MachineIRBuilder::buildLoad(const DstOp &Res,
350                                                 const SrcOp &Addr,
351                                                 MachineMemOperand &MMO) {
352   return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
353 }
354 
355 MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
356                                                      const DstOp &Res,
357                                                      const SrcOp &Addr,
358                                                      MachineMemOperand &MMO) {
359   assert(Res.getLLTTy(*getMRI()).isValid() && "invalid operand type");
360   assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
361 
362   auto MIB = buildInstr(Opcode);
363   Res.addDefToMIB(*getMRI(), MIB);
364   Addr.addSrcToMIB(MIB);
365   MIB.addMemOperand(&MMO);
366   return MIB;
367 }
368 
369 MachineInstrBuilder MachineIRBuilder::buildStore(const SrcOp &Val,
370                                                  const SrcOp &Addr,
371                                                  MachineMemOperand &MMO) {
372   assert(Val.getLLTTy(*getMRI()).isValid() && "invalid operand type");
373   assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
374 
375   auto MIB = buildInstr(TargetOpcode::G_STORE);
376   Val.addSrcToMIB(MIB);
377   Addr.addSrcToMIB(MIB);
378   MIB.addMemOperand(&MMO);
379   return MIB;
380 }
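
// Usage sketch: loads and stores carry an explicit MachineMemOperand
// describing the access. The pointer info, size and alignment are assumptions.
//   auto *MMO = MF.getMachineMemOperand(MachinePointerInfo(),
//                                       MachineMemOperand::MOLoad,
//                                       /*Size=*/4, /*Align=*/4);
//   auto Val = MIRBuilder.buildLoad(LLT::scalar(32), AddrReg, *MMO);
//   // Stores use MachineMemOperand::MOStore and buildStore(Val, Addr, *MMO).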
381 
382 MachineInstrBuilder MachineIRBuilder::buildUAddo(const DstOp &Res,
383                                                  const DstOp &CarryOut,
384                                                  const SrcOp &Op0,
385                                                  const SrcOp &Op1) {
386   return buildInstr(TargetOpcode::G_UADDO, {Res, CarryOut}, {Op0, Op1});
387 }
388 
389 MachineInstrBuilder MachineIRBuilder::buildUAdde(const DstOp &Res,
390                                                  const DstOp &CarryOut,
391                                                  const SrcOp &Op0,
392                                                  const SrcOp &Op1,
393                                                  const SrcOp &CarryIn) {
394   return buildInstr(TargetOpcode::G_UADDE, {Res, CarryOut},
395                     {Op0, Op1, CarryIn});
396 }
397 
398 MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
399                                                   const SrcOp &Op) {
400   return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
401 }
402 
403 MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
404                                                 const SrcOp &Op) {
405   return buildInstr(TargetOpcode::G_SEXT, Res, Op);
406 }
407 
408 MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
409                                                 const SrcOp &Op) {
410   return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
411 }
412 
413 unsigned MachineIRBuilder::getBoolExtOp(bool IsVec, bool IsFP) const {
414   const auto *TLI = getMF().getSubtarget().getTargetLowering();
415   switch (TLI->getBooleanContents(IsVec, IsFP)) {
416   case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
417     return TargetOpcode::G_SEXT;
418   case TargetLoweringBase::ZeroOrOneBooleanContent:
419     return TargetOpcode::G_ZEXT;
420   default:
421     return TargetOpcode::G_ANYEXT;
422   }
423 }
424 
425 MachineInstrBuilder MachineIRBuilder::buildBoolExt(const DstOp &Res,
426                                                    const SrcOp &Op,
427                                                    bool IsFP) {
428   unsigned ExtOp = getBoolExtOp(getMRI()->getType(Op.getReg()).isVector(), IsFP);
429   return buildInstr(ExtOp, Res, Op);
430 }
431 
432 MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
433                                                       const DstOp &Res,
434                                                       const SrcOp &Op) {
435   assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
436           TargetOpcode::G_SEXT == ExtOpc) &&
437          "Expecting Extending Opc");
438   assert(Res.getLLTTy(*getMRI()).isScalar() ||
439          Res.getLLTTy(*getMRI()).isVector());
440   assert(Res.getLLTTy(*getMRI()).isScalar() ==
441          Op.getLLTTy(*getMRI()).isScalar());
442 
443   unsigned Opcode = TargetOpcode::COPY;
444   if (Res.getLLTTy(*getMRI()).getSizeInBits() >
445       Op.getLLTTy(*getMRI()).getSizeInBits())
446     Opcode = ExtOpc;
447   else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
448            Op.getLLTTy(*getMRI()).getSizeInBits())
449     Opcode = TargetOpcode::G_TRUNC;
450   else
451     assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));
452 
453   return buildInstr(Opcode, Res, Op);
454 }
455 
456 MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
457                                                        const SrcOp &Op) {
458   return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
459 }
460 
461 MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
462                                                        const SrcOp &Op) {
463   return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
464 }
465 
466 MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
467                                                          const SrcOp &Op) {
468   return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
469 }
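
// Usage sketch: the *OrTrunc helpers pick G_SEXT/G_ZEXT/G_ANYEXT, G_TRUNC or
// COPY automatically from the relative sizes, so callers never compare widths.
//   auto Widened  = MIRBuilder.buildZExtOrTrunc(LLT::scalar(64), SmallReg);
//   auto Narrowed = MIRBuilder.buildZExtOrTrunc(LLT::scalar(8), WideReg);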
470 
471 MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
472                                                 const SrcOp &Src) {
473   LLT SrcTy = Src.getLLTTy(*getMRI());
474   LLT DstTy = Dst.getLLTTy(*getMRI());
475   if (SrcTy == DstTy)
476     return buildCopy(Dst, Src);
477 
478   unsigned Opcode;
479   if (SrcTy.isPointer() && DstTy.isScalar())
480     Opcode = TargetOpcode::G_PTRTOINT;
481   else if (DstTy.isPointer() && SrcTy.isScalar())
482     Opcode = TargetOpcode::G_INTTOPTR;
483   else {
484     assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
485     Opcode = TargetOpcode::G_BITCAST;
486   }
487 
488   return buildInstr(Opcode, Dst, Src);
489 }
490 
491 MachineInstrBuilder MachineIRBuilder::buildExtract(const DstOp &Dst,
492                                                    const SrcOp &Src,
493                                                    uint64_t Index) {
494   LLT SrcTy = Src.getLLTTy(*getMRI());
495   LLT DstTy = Dst.getLLTTy(*getMRI());
496 
497 #ifndef NDEBUG
498   assert(SrcTy.isValid() && "invalid operand type");
499   assert(DstTy.isValid() && "invalid operand type");
500   assert(Index + DstTy.getSizeInBits() <= SrcTy.getSizeInBits() &&
501          "extracting off end of register");
502 #endif
503 
504   if (DstTy.getSizeInBits() == SrcTy.getSizeInBits()) {
505     assert(Index == 0 && "extraction past the end of a register");
506     return buildCast(Dst, Src);
507   }
508 
509   auto Extract = buildInstr(TargetOpcode::G_EXTRACT);
510   Dst.addDefToMIB(*getMRI(), Extract);
511   Src.addSrcToMIB(Extract);
512   Extract.addImm(Index);
513   return Extract;
514 }
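
// Usage sketch: extracting bit ranges from a wider value; the index is a bit
// offset into the source register. WideReg is assumed to be a 64-bit vreg.
//   auto Lo = MIRBuilder.buildExtract(LLT::scalar(32), WideReg, 0);
//   auto Hi = MIRBuilder.buildExtract(LLT::scalar(32), WideReg, 32);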
515 
516 void MachineIRBuilder::buildSequence(Register Res, ArrayRef<Register> Ops,
517                                      ArrayRef<uint64_t> Indices) {
518 #ifndef NDEBUG
519   assert(Ops.size() == Indices.size() && "incompatible args");
520   assert(!Ops.empty() && "invalid trivial sequence");
521   assert(std::is_sorted(Indices.begin(), Indices.end()) &&
522          "sequence offsets must be in ascending order");
523 
524   assert(getMRI()->getType(Res).isValid() && "invalid operand type");
525   for (auto Op : Ops)
526     assert(getMRI()->getType(Op).isValid() && "invalid operand type");
527 #endif
528 
529   LLT ResTy = getMRI()->getType(Res);
530   LLT OpTy = getMRI()->getType(Ops[0]);
531   unsigned OpSize = OpTy.getSizeInBits();
532   bool MaybeMerge = true;
533   for (unsigned i = 0; i < Ops.size(); ++i) {
534     if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
535       MaybeMerge = false;
536       break;
537     }
538   }
539 
540   if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
541     buildMerge(Res, Ops);
542     return;
543   }
544 
545   Register ResIn = getMRI()->createGenericVirtualRegister(ResTy);
546   buildUndef(ResIn);
547 
548   for (unsigned i = 0; i < Ops.size(); ++i) {
549     Register ResOut = i + 1 == Ops.size()
550                           ? Res
551                           : getMRI()->createGenericVirtualRegister(ResTy);
552     buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
553     ResIn = ResOut;
554   }
555 }
556 
557 MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
558   return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
559 }
560 
561 MachineInstrBuilder MachineIRBuilder::buildMerge(const DstOp &Res,
562                                                  ArrayRef<Register> Ops) {
563   // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
564   // we need some temporary storage for the SrcOp objects. Here we use a
565   // sufficiently large SmallVector to not go through the heap.
566   SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
567   assert(TmpVec.size() > 1);
568   return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
569 }
570 
571 MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
572                                                    const SrcOp &Op) {
573   // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
574   // we need some temporary storage for the DstOp objects. Here we use a
575   // sufficiently large SmallVector to not go through the heap.
576   SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
577   assert(TmpVec.size() > 1);
578   return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
579 }
580 
581 MachineInstrBuilder MachineIRBuilder::buildUnmerge(LLT Res,
582                                                    const SrcOp &Op) {
583   unsigned NumReg = Op.getLLTTy(*getMRI()).getSizeInBits() / Res.getSizeInBits();
584   SmallVector<Register, 8> TmpVec;
585   for (unsigned I = 0; I != NumReg; ++I)
586     TmpVec.push_back(getMRI()->createGenericVirtualRegister(Res));
587   return buildUnmerge(TmpVec, Op);
588 }
589 
590 MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<Register> Res,
591                                                    const SrcOp &Op) {
592   // Unfortunately to convert from ArrayRef<Register> to ArrayRef<DstOp>,
593   // we need some temporary storage for the DstOp objects. Here we use a
594   // sufficiently large SmallVector to not go through the heap.
595   SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
596   assert(TmpVec.size() > 1);
597   return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
598 }
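
// Usage sketch: splitting and recombining a value. SrcReg is assumed to be a
// 64-bit vreg; the two 32-bit pieces are merged back into DstReg.
//   auto Pieces = MIRBuilder.buildUnmerge(LLT::scalar(32), SrcReg);
//   MIRBuilder.buildMerge(DstReg, {Pieces.getReg(0), Pieces.getReg(1)});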
599 
600 MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
601                                                        ArrayRef<Register> Ops) {
602   // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
603   // we need some temporary storage for the SrcOp objects. Here we use a
604   // sufficiently large SmallVector to not go through the heap.
605   SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
606   return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
607 }
608 
609 MachineInstrBuilder MachineIRBuilder::buildSplatVector(const DstOp &Res,
610                                                        const SrcOp &Src) {
611   SmallVector<SrcOp, 8> TmpVec(Res.getLLTTy(*getMRI()).getNumElements(), Src);
612   return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
613 }
614 
615 MachineInstrBuilder
616 MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
617                                         ArrayRef<Register> Ops) {
618   // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
619   // we need some temporary storage for the SrcOp objects. Here we use a
620   // sufficiently large SmallVector to not go through the heap.
621   SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
622   return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
623 }
624 
625 MachineInstrBuilder
626 MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<Register> Ops) {
627   // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
628   // we need some temporary storage for the SrcOp objects. Here we use a
629   // sufficiently large SmallVector to not go through the heap.
630   SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
631   return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
632 }
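
// Usage sketch: G_BUILD_VECTOR takes one scalar source per element, while
// buildSplatVector repeats a single source. V4S32 and the element registers
// are illustrative.
//   LLT V4S32 = LLT::vector(4, 32);
//   auto BV = MIRBuilder.buildBuildVector(V4S32, {E0, E1, E2, E3});
//   auto SV = MIRBuilder.buildSplatVector(V4S32, EltReg);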
633 
634 MachineInstrBuilder MachineIRBuilder::buildInsert(Register Res, Register Src,
635                                                   Register Op, unsigned Index) {
636   assert(Index + getMRI()->getType(Op).getSizeInBits() <=
637              getMRI()->getType(Res).getSizeInBits() &&
638          "insertion past the end of a register");
639 
640   if (getMRI()->getType(Res).getSizeInBits() ==
641       getMRI()->getType(Op).getSizeInBits()) {
642     return buildCast(Res, Op);
643   }
644 
645   return buildInstr(TargetOpcode::G_INSERT)
646       .addDef(Res)
647       .addUse(Src)
648       .addUse(Op)
649       .addImm(Index);
650 }
651 
652 MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
653                                                      ArrayRef<Register> ResultRegs,
654                                                      bool HasSideEffects) {
655   auto MIB =
656       buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
657                                 : TargetOpcode::G_INTRINSIC);
658   for (unsigned ResultReg : ResultRegs)
659     MIB.addDef(ResultReg);
660   MIB.addIntrinsicID(ID);
661   return MIB;
662 }
663 
664 MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
665                                                      ArrayRef<DstOp> Results,
666                                                      bool HasSideEffects) {
667   auto MIB =
668       buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
669                                 : TargetOpcode::G_INTRINSIC);
670   for (DstOp Result : Results)
671     Result.addDefToMIB(*getMRI(), MIB);
672   MIB.addIntrinsicID(ID);
673   return MIB;
674 }
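
// Usage sketch: building an intrinsic call; additional arguments are appended
// with addUse after the results and the ID. The intrinsic and registers below
// are assumptions.
//   MIRBuilder.buildIntrinsic(Intrinsic::ctpop, {DstReg},
//                             /*HasSideEffects=*/false)
//       .addUse(SrcReg);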
675 
676 MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
677                                                  const SrcOp &Op) {
678   return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
679 }
680 
681 MachineInstrBuilder MachineIRBuilder::buildFPTrunc(const DstOp &Res,
682                                                    const SrcOp &Op) {
683   return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op);
684 }
685 
686 MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
687                                                 const DstOp &Res,
688                                                 const SrcOp &Op0,
689                                                 const SrcOp &Op1) {
690   return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
691 }
692 
693 MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
694                                                 const DstOp &Res,
695                                                 const SrcOp &Op0,
696                                                 const SrcOp &Op1,
697                                                 Optional<unsigned> Flags) {
698 
699   return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1}, Flags);
700 }
701 
702 MachineInstrBuilder MachineIRBuilder::buildSelect(const DstOp &Res,
703                                                   const SrcOp &Tst,
704                                                   const SrcOp &Op0,
705                                                   const SrcOp &Op1,
706                                                   Optional<unsigned> Flags) {
707 
708   return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1}, Flags);
709 }
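
// Usage sketch: a compare producing an s1 condition feeding a select. The
// register names are assumptions.
//   auto Cond = MIRBuilder.buildICmp(CmpInst::ICMP_EQ, LLT::scalar(1), LHS,
//                                    RHS);
//   auto Sel = MIRBuilder.buildSelect(LLT::scalar(32), Cond, TrueVal,
//                                     FalseVal);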
710 
711 MachineInstrBuilder
712 MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
713                                            const SrcOp &Elt, const SrcOp &Idx) {
714   return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
715 }
716 
717 MachineInstrBuilder
718 MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
719                                             const SrcOp &Idx) {
720   return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
721 }
722 
723 MachineInstrBuilder MachineIRBuilder::buildAtomicCmpXchgWithSuccess(
724     Register OldValRes, Register SuccessRes, Register Addr, Register CmpVal,
725     Register NewVal, MachineMemOperand &MMO) {
726 #ifndef NDEBUG
727   LLT OldValResTy = getMRI()->getType(OldValRes);
728   LLT SuccessResTy = getMRI()->getType(SuccessRes);
729   LLT AddrTy = getMRI()->getType(Addr);
730   LLT CmpValTy = getMRI()->getType(CmpVal);
731   LLT NewValTy = getMRI()->getType(NewVal);
732   assert(OldValResTy.isScalar() && "invalid operand type");
733   assert(SuccessResTy.isScalar() && "invalid operand type");
734   assert(AddrTy.isPointer() && "invalid operand type");
735   assert(CmpValTy.isValid() && "invalid operand type");
736   assert(NewValTy.isValid() && "invalid operand type");
737   assert(OldValResTy == CmpValTy && "type mismatch");
738   assert(OldValResTy == NewValTy && "type mismatch");
739 #endif
740 
741   return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
742       .addDef(OldValRes)
743       .addDef(SuccessRes)
744       .addUse(Addr)
745       .addUse(CmpVal)
746       .addUse(NewVal)
747       .addMemOperand(&MMO);
748 }
749 
750 MachineInstrBuilder
751 MachineIRBuilder::buildAtomicCmpXchg(Register OldValRes, Register Addr,
752                                      Register CmpVal, Register NewVal,
753                                      MachineMemOperand &MMO) {
754 #ifndef NDEBUG
755   LLT OldValResTy = getMRI()->getType(OldValRes);
756   LLT AddrTy = getMRI()->getType(Addr);
757   LLT CmpValTy = getMRI()->getType(CmpVal);
758   LLT NewValTy = getMRI()->getType(NewVal);
759   assert(OldValResTy.isScalar() && "invalid operand type");
760   assert(AddrTy.isPointer() && "invalid operand type");
761   assert(CmpValTy.isValid() && "invalid operand type");
762   assert(NewValTy.isValid() && "invalid operand type");
763   assert(OldValResTy == CmpValTy && "type mismatch");
764   assert(OldValResTy == NewValTy && "type mismatch");
765 #endif
766 
767   return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
768       .addDef(OldValRes)
769       .addUse(Addr)
770       .addUse(CmpVal)
771       .addUse(NewVal)
772       .addMemOperand(&MMO);
773 }
774 
775 MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(
776   unsigned Opcode, const DstOp &OldValRes,
777   const SrcOp &Addr, const SrcOp &Val,
778   MachineMemOperand &MMO) {
779 
780 #ifndef NDEBUG
781   LLT OldValResTy = OldValRes.getLLTTy(*getMRI());
782   LLT AddrTy = Addr.getLLTTy(*getMRI());
783   LLT ValTy = Val.getLLTTy(*getMRI());
784   assert(OldValResTy.isScalar() && "invalid operand type");
785   assert(AddrTy.isPointer() && "invalid operand type");
786   assert(ValTy.isValid() && "invalid operand type");
787   assert(OldValResTy == ValTy && "type mismatch");
788   assert(MMO.isAtomic() && "not atomic mem operand");
789 #endif
790 
791   auto MIB = buildInstr(Opcode);
792   OldValRes.addDefToMIB(*getMRI(), MIB);
793   Addr.addSrcToMIB(MIB);
794   Val.addSrcToMIB(MIB);
795   MIB.addMemOperand(&MMO);
796   return MIB;
797 }
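
// Usage sketch: an atomic add in memory. The memory operand must be flagged
// atomic; the ordering, size and registers below are assumptions.
//   auto *MMO = MF.getMachineMemOperand(
//       MachinePointerInfo(),
//       MachineMemOperand::MOLoad | MachineMemOperand::MOStore, /*Size=*/4,
//       /*Align=*/4, AAMDNodes(), nullptr, SyncScope::System,
//       AtomicOrdering::SequentiallyConsistent);
//   MIRBuilder.buildAtomicRMWAdd(OldValReg, AddrReg, ValReg, *MMO);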
798 
799 MachineInstrBuilder
800 MachineIRBuilder::buildAtomicRMWXchg(Register OldValRes, Register Addr,
801                                      Register Val, MachineMemOperand &MMO) {
802   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
803                         MMO);
804 }
805 MachineInstrBuilder
806 MachineIRBuilder::buildAtomicRMWAdd(Register OldValRes, Register Addr,
807                                     Register Val, MachineMemOperand &MMO) {
808   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
809                         MMO);
810 }
811 MachineInstrBuilder
812 MachineIRBuilder::buildAtomicRMWSub(Register OldValRes, Register Addr,
813                                     Register Val, MachineMemOperand &MMO) {
814   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
815                         MMO);
816 }
817 MachineInstrBuilder
818 MachineIRBuilder::buildAtomicRMWAnd(Register OldValRes, Register Addr,
819                                     Register Val, MachineMemOperand &MMO) {
820   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
821                         MMO);
822 }
823 MachineInstrBuilder
824 MachineIRBuilder::buildAtomicRMWNand(Register OldValRes, Register Addr,
825                                      Register Val, MachineMemOperand &MMO) {
826   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
827                         MMO);
828 }
829 MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(Register OldValRes,
830                                                        Register Addr,
831                                                        Register Val,
832                                                        MachineMemOperand &MMO) {
833   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
834                         MMO);
835 }
836 MachineInstrBuilder
837 MachineIRBuilder::buildAtomicRMWXor(Register OldValRes, Register Addr,
838                                     Register Val, MachineMemOperand &MMO) {
839   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
840                         MMO);
841 }
842 MachineInstrBuilder
843 MachineIRBuilder::buildAtomicRMWMax(Register OldValRes, Register Addr,
844                                     Register Val, MachineMemOperand &MMO) {
845   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
846                         MMO);
847 }
848 MachineInstrBuilder
849 MachineIRBuilder::buildAtomicRMWMin(Register OldValRes, Register Addr,
850                                     Register Val, MachineMemOperand &MMO) {
851   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
852                         MMO);
853 }
854 MachineInstrBuilder
855 MachineIRBuilder::buildAtomicRMWUmax(Register OldValRes, Register Addr,
856                                      Register Val, MachineMemOperand &MMO) {
857   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
858                         MMO);
859 }
860 MachineInstrBuilder
861 MachineIRBuilder::buildAtomicRMWUmin(Register OldValRes, Register Addr,
862                                      Register Val, MachineMemOperand &MMO) {
863   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
864                         MMO);
865 }
866 
867 MachineInstrBuilder
868 MachineIRBuilder::buildAtomicRMWFAdd(
869   const DstOp &OldValRes, const SrcOp &Addr, const SrcOp &Val,
870   MachineMemOperand &MMO) {
871   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FADD, OldValRes, Addr, Val,
872                         MMO);
873 }
874 
875 MachineInstrBuilder
876 MachineIRBuilder::buildAtomicRMWFSub(const DstOp &OldValRes, const SrcOp &Addr,
877                                      const SrcOp &Val, MachineMemOperand &MMO) {
878   return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FSUB, OldValRes, Addr, Val,
879                         MMO);
880 }
881 
882 MachineInstrBuilder
883 MachineIRBuilder::buildFence(unsigned Ordering, unsigned Scope) {
884   return buildInstr(TargetOpcode::G_FENCE)
885     .addImm(Ordering)
886     .addImm(Scope);
887 }
888 
889 MachineInstrBuilder
890 MachineIRBuilder::buildBlockAddress(Register Res, const BlockAddress *BA) {
891 #ifndef NDEBUG
892   assert(getMRI()->getType(Res).isPointer() && "invalid res type");
893 #endif
894 
895   return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
896 }
897 
898 void MachineIRBuilder::validateTruncExt(const LLT &DstTy, const LLT &SrcTy,
899                                         bool IsExtend) {
900 #ifndef NDEBUG
901   if (DstTy.isVector()) {
902     assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
903     assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
904            "different number of elements in a trunc/ext");
905   } else
906     assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");
907 
908   if (IsExtend)
909     assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
910            "invalid narrowing extend");
911   else
912     assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
913            "invalid widening trunc");
914 #endif
915 }
916 
917 void MachineIRBuilder::validateSelectOp(const LLT &ResTy, const LLT &TstTy,
918                                         const LLT &Op0Ty, const LLT &Op1Ty) {
919 #ifndef NDEBUG
920   assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
921          "invalid operand type");
922   assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
923   if (ResTy.isScalar() || ResTy.isPointer())
924     assert(TstTy.isScalar() && "type mismatch");
925   else
926     assert((TstTy.isScalar() ||
927             (TstTy.isVector() &&
928              TstTy.getNumElements() == Op0Ty.getNumElements())) &&
929            "type mismatch");
930 #endif
931 }
932 
933 MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opc,
934                                                  ArrayRef<DstOp> DstOps,
935                                                  ArrayRef<SrcOp> SrcOps,
936                                                  Optional<unsigned> Flags) {
937   switch (Opc) {
938   default:
939     break;
940   case TargetOpcode::G_SELECT: {
941     assert(DstOps.size() == 1 && "Invalid select");
942     assert(SrcOps.size() == 3 && "Invalid select");
943     validateSelectOp(
944         DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
945         SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
946     break;
947   }
948   case TargetOpcode::G_ADD:
949   case TargetOpcode::G_AND:
950   case TargetOpcode::G_MUL:
951   case TargetOpcode::G_OR:
952   case TargetOpcode::G_SUB:
953   case TargetOpcode::G_XOR:
954   case TargetOpcode::G_UDIV:
955   case TargetOpcode::G_SDIV:
956   case TargetOpcode::G_UREM:
957   case TargetOpcode::G_SREM:
958   case TargetOpcode::G_SMIN:
959   case TargetOpcode::G_SMAX:
960   case TargetOpcode::G_UMIN:
961   case TargetOpcode::G_UMAX: {
962     // All these are binary ops.
963     assert(DstOps.size() == 1 && "Invalid Dst");
964     assert(SrcOps.size() == 2 && "Invalid Srcs");
965     validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
966                      SrcOps[0].getLLTTy(*getMRI()),
967                      SrcOps[1].getLLTTy(*getMRI()));
968     break;
969   }
970   case TargetOpcode::G_SHL:
971   case TargetOpcode::G_ASHR:
972   case TargetOpcode::G_LSHR: {
973     assert(DstOps.size() == 1 && "Invalid Dst");
974     assert(SrcOps.size() == 2 && "Invalid Srcs");
975     validateShiftOp(DstOps[0].getLLTTy(*getMRI()),
976                     SrcOps[0].getLLTTy(*getMRI()),
977                     SrcOps[1].getLLTTy(*getMRI()));
978     break;
979   }
980   case TargetOpcode::G_SEXT:
981   case TargetOpcode::G_ZEXT:
982   case TargetOpcode::G_ANYEXT:
983     assert(DstOps.size() == 1 && "Invalid Dst");
984     assert(SrcOps.size() == 1 && "Invalid Srcs");
985     validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
986                      SrcOps[0].getLLTTy(*getMRI()), true);
987     break;
988   case TargetOpcode::G_TRUNC:
989   case TargetOpcode::G_FPTRUNC: {
990     assert(DstOps.size() == 1 && "Invalid Dst");
991     assert(SrcOps.size() == 1 && "Invalid Srcs");
992     validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
993                      SrcOps[0].getLLTTy(*getMRI()), false);
994     break;
995   }
996   case TargetOpcode::COPY:
997     assert(DstOps.size() == 1 && "Invalid Dst");
998     // If the caller wants to add a subreg source it has to be done separately
999     // so we may not have any SrcOps at this point yet.
1000     break;
1001   case TargetOpcode::G_FCMP:
1002   case TargetOpcode::G_ICMP: {
1003     assert(DstOps.size() == 1 && "Invalid Dst Operands");
1004     assert(SrcOps.size() == 3 && "Invalid Src Operands");
1005     // For F/ICMP, the first src operand is the predicate, followed by
1006     // the two comparands.
1007     assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
1008            "Expecting predicate");
1009     assert([&]() -> bool {
1010       CmpInst::Predicate Pred = SrcOps[0].getPredicate();
1011       return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
1012                                          : CmpInst::isFPPredicate(Pred);
1013     }() && "Invalid predicate");
1014     assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
1015            "Type mismatch");
1016     assert([&]() -> bool {
1017       LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
1018       LLT DstTy = DstOps[0].getLLTTy(*getMRI());
1019       if (Op0Ty.isScalar() || Op0Ty.isPointer())
1020         return DstTy.isScalar();
1021       else
1022         return DstTy.isVector() &&
1023                DstTy.getNumElements() == Op0Ty.getNumElements();
1024     }() && "Type Mismatch");
1025     break;
1026   }
1027   case TargetOpcode::G_UNMERGE_VALUES: {
1028     assert(!DstOps.empty() && "Invalid trivial sequence");
1029     assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
1030     assert(std::all_of(DstOps.begin(), DstOps.end(),
1031                        [&, this](const DstOp &Op) {
1032                          return Op.getLLTTy(*getMRI()) ==
1033                                 DstOps[0].getLLTTy(*getMRI());
1034                        }) &&
1035            "type mismatch in output list");
1036     assert(DstOps.size() * DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1037                SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1038            "output operands do not cover input register");
1039     break;
1040   }
1041   case TargetOpcode::G_MERGE_VALUES: {
1042     assert(!SrcOps.empty() && "invalid trivial sequence");
1043     assert(DstOps.size() == 1 && "Invalid Dst");
1044     assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1045                        [&, this](const SrcOp &Op) {
1046                          return Op.getLLTTy(*getMRI()) ==
1047                                 SrcOps[0].getLLTTy(*getMRI());
1048                        }) &&
1049            "type mismatch in input list");
1050     assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1051                DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1052            "input operands do not cover output register");
1053     if (SrcOps.size() == 1)
1054       return buildCast(DstOps[0], SrcOps[0]);
1055     if (DstOps[0].getLLTTy(*getMRI()).isVector())
1056       return buildInstr(TargetOpcode::G_CONCAT_VECTORS, DstOps, SrcOps);
1057     break;
1058   }
1059   case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
1060     assert(DstOps.size() == 1 && "Invalid Dst size");
1061     assert(SrcOps.size() == 2 && "Invalid Src size");
1062     assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
1063     assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
1064             DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
1065            "Invalid operand type");
1066     assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
1067     assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
1068                DstOps[0].getLLTTy(*getMRI()) &&
1069            "Type mismatch");
1070     break;
1071   }
1072   case TargetOpcode::G_INSERT_VECTOR_ELT: {
1073     assert(DstOps.size() == 1 && "Invalid dst size");
1074     assert(SrcOps.size() == 3 && "Invalid src size");
1075     assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
1076            SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
1077     assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
1078                SrcOps[1].getLLTTy(*getMRI()) &&
1079            "Type mismatch");
1080     assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
1081     assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
1082                SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
1083            "Type mismatch");
1084     break;
1085   }
1086   case TargetOpcode::G_BUILD_VECTOR: {
1087     assert(SrcOps.size() >= 2 &&
1088            "Must have at least 2 operands");
1089     assert(DstOps.size() == 1 && "Invalid DstOps");
1090     assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
1091            "Res type must be a vector");
1092     assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1093                        [&, this](const SrcOp &Op) {
1094                          return Op.getLLTTy(*getMRI()) ==
1095                                 SrcOps[0].getLLTTy(*getMRI());
1096                        }) &&
1097            "type mismatch in input list");
1098     assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1099                DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1100            "input scalars do not exactly cover the output vector register");
1101     break;
1102   }
1103   case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
1104     assert(SrcOps.size() >= 2 &&
1105            "Must have at least 2 operands");
1106     assert(DstOps.size() == 1 && "Invalid DstOps");
1107     assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
1108            "Res type must be a vector");
1109     assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1110                        [&, this](const SrcOp &Op) {
1111                          return Op.getLLTTy(*getMRI()) ==
1112                                 SrcOps[0].getLLTTy(*getMRI());
1113                        }) &&
1114            "type mismatch in input list");
1115     if (SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1116         DstOps[0].getLLTTy(*getMRI()).getElementType().getSizeInBits())
1117       return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
1118     break;
1119   }
1120   case TargetOpcode::G_CONCAT_VECTORS: {
1121     assert(DstOps.size() == 1 && "Invalid DstOps");
1122     assert(SrcOps.size() >= 2 &&
1123            "Must have at least 2 operands");
1124     assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1125                        [&, this](const SrcOp &Op) {
1126                          return (Op.getLLTTy(*getMRI()).isVector() &&
1127                                  Op.getLLTTy(*getMRI()) ==
1128                                      SrcOps[0].getLLTTy(*getMRI()));
1129                        }) &&
1130            "type mismatch in input list");
1131     assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1132                DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1133            "input vectors do not exactly cover the output vector register");
1134     break;
1135   }
1136   case TargetOpcode::G_UADDE: {
1137     assert(DstOps.size() == 2 && "Invalid no of dst operands");
1138     assert(SrcOps.size() == 3 && "Invalid no of src operands");
1139     assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
1140     assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
1141            (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
1142            "Invalid operand");
1143     assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
1144     assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
1145            "type mismatch");
1146     break;
1147   }
1148   }
1149 
1150   auto MIB = buildInstr(Opc);
1151   for (const DstOp &Op : DstOps)
1152     Op.addDefToMIB(*getMRI(), MIB);
1153   for (const SrcOp &Op : SrcOps)
1154     Op.addSrcToMIB(MIB);
1155   if (Flags)
1156     MIB->setFlags(*Flags);
1157   return MIB;
1158 }
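
// Usage sketch: the generic overload takes destination and source operand
// lists plus optional MI flags and runs the per-opcode validation above. The
// registers and the flag are illustrative.
//   MIRBuilder.buildInstr(TargetOpcode::G_FADD, {DstReg}, {LHS, RHS},
//                         MachineInstr::FmNoNans);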
1159