//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugInfo.h"

using namespace llvm;

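// Reset the builder to operate on a new function: cache the function's
// register and instruction info and clear any previously configured insertion
// point, debug location, and insertion callback.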
void MachineIRBuilder::setMF(MachineFunction &MF) {
  this->MF = &MF;
  this->MBB = nullptr;
  this->MRI = &MF.getRegInfo();
  this->TII = MF.getSubtarget().getInstrInfo();
  this->DL = DebugLoc();
  this->II = MachineBasicBlock::iterator();
  this->InsertedInstr = nullptr;
}

void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) {
  this->MBB = &MBB;
  this->II = MBB.end();
  assert(&getMF() == MBB.getParent() &&
         "Basic block is in a different function");
}

void MachineIRBuilder::setInstr(MachineInstr &MI) {
  assert(MI.getParent() && "Instruction is not part of a basic block");
  setMBB(*MI.getParent());
  this->II = MI.getIterator();
}

void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator II) {
  assert(MBB.getParent() == &getMF() &&
         "Basic block is in a different function");
  this->MBB = &MBB;
  this->II = II;
}

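// Install a callback that is invoked for every instruction this builder
// inserts, until stopRecordingInsertions() is called.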
void MachineIRBuilder::recordInsertions(
    std::function<void(MachineInstr *)> Inserted) {
  InsertedInstr = std::move(Inserted);
}

void MachineIRBuilder::stopRecordingInsertions() {
  InsertedInstr = nullptr;
}

//------------------------------------------------------------------------------
// Build instruction variants.
//------------------------------------------------------------------------------

MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) {
  return insertInstr(buildInstrNoInsert(Opcode));
}

MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
  MachineInstrBuilder MIB = BuildMI(getMF(), DL, getTII().get(Opcode));
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  if (InsertedInstr)
    InsertedInstr(MIB);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildDirectDbgValue(unsigned Reg, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) &&
         "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), DL, getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}

MachineInstrBuilder
MachineIRBuilder::buildIndirectDbgValue(unsigned Reg, const MDNode *Variable,
                                        const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) &&
         "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), DL, getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}

MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
                                                      const MDNode *Variable,
                                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) &&
         "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}

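// Emit a DBG_VALUE describing a constant: integers that fit in 64 bits become
// a plain immediate, wider integers a CImm operand, floating-point constants
// an FPImm; any other constant is dropped and represented by a null register
// operand.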
MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
                                                         const MDNode *Variable,
                                                         const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) &&
         "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert %noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(0U);
  }

  return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilder::buildFrameIndex(unsigned Res, int Idx) {
  assert(MRI->getType(Res).isPointer() && "invalid operand type");
  return buildInstr(TargetOpcode::G_FRAME_INDEX)
      .addDef(Res)
      .addFrameIndex(Idx);
}

MachineInstrBuilder MachineIRBuilder::buildGlobalValue(unsigned Res,
                                                       const GlobalValue *GV) {
  assert(MRI->getType(Res).isPointer() && "invalid operand type");
  assert(MRI->getType(Res).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");

  return buildInstr(TargetOpcode::G_GLOBAL_VALUE)
      .addDef(Res)
      .addGlobalAddress(GV);
}

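// Shared helper for the two-input generic binary operations (G_ADD, G_SUB,
// G_MUL, G_AND, G_OR, ...): the destination and both sources must have the
// same scalar or vector type.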
MachineInstrBuilder MachineIRBuilder::buildBinaryOp(unsigned Opcode,
                                                    unsigned Res, unsigned Op0,
                                                    unsigned Op1) {
  assert((MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()) &&
         "invalid operand type");
  assert(MRI->getType(Res) == MRI->getType(Op0) &&
         MRI->getType(Res) == MRI->getType(Op1) && "type mismatch");

  return buildInstr(Opcode)
      .addDef(Res)
      .addUse(Op0)
      .addUse(Op1);
}

MachineInstrBuilder MachineIRBuilder::buildAdd(unsigned Res, unsigned Op0,
                                               unsigned Op1) {
  return buildBinaryOp(TargetOpcode::G_ADD, Res, Op0, Op1);
}

MachineInstrBuilder MachineIRBuilder::buildGEP(unsigned Res, unsigned Op0,
                                               unsigned Op1) {
  assert(MRI->getType(Res).isPointer() &&
         MRI->getType(Res) == MRI->getType(Op0) && "type mismatch");
  assert(MRI->getType(Op1).isScalar() && "invalid offset type");

  return buildInstr(TargetOpcode::G_GEP)
      .addDef(Res)
      .addUse(Op0)
      .addUse(Op1);
}

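// Materialize an address equal to Op0 plus a constant offset. If the offset
// is zero no instructions are emitted: Res is simply set to Op0 and None is
// returned. Otherwise a G_CONSTANT feeding a G_GEP is created.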
Optional<MachineInstrBuilder>
MachineIRBuilder::materializeGEP(unsigned &Res, unsigned Op0,
                                 const LLT &ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");

  if (Value == 0) {
    Res = Op0;
    return None;
  }

  Res = MRI->createGenericVirtualRegister(MRI->getType(Op0));
  unsigned TmpReg = MRI->createGenericVirtualRegister(ValueTy);

  buildConstant(TmpReg, Value);
  return buildGEP(Res, Op0, TmpReg);
}

MachineInstrBuilder MachineIRBuilder::buildPtrMask(unsigned Res, unsigned Op0,
                                                   uint32_t NumBits) {
  assert(MRI->getType(Res).isPointer() &&
         MRI->getType(Res) == MRI->getType(Op0) && "type mismatch");

  return buildInstr(TargetOpcode::G_PTR_MASK)
      .addDef(Res)
      .addUse(Op0)
      .addImm(NumBits);
}

MachineInstrBuilder MachineIRBuilder::buildSub(unsigned Res, unsigned Op0,
                                               unsigned Op1) {
  return buildBinaryOp(TargetOpcode::G_SUB, Res, Op0, Op1);
}

MachineInstrBuilder MachineIRBuilder::buildMul(unsigned Res, unsigned Op0,
                                               unsigned Op1) {
  return buildBinaryOp(TargetOpcode::G_MUL, Res, Op0, Op1);
}

MachineInstrBuilder MachineIRBuilder::buildAnd(unsigned Res, unsigned Op0,
                                               unsigned Op1) {
  return buildBinaryOp(TargetOpcode::G_AND, Res, Op0, Op1);
}

MachineInstrBuilder MachineIRBuilder::buildOr(unsigned Res, unsigned Op0,
                                              unsigned Op1) {
  return buildBinaryOp(TargetOpcode::G_OR, Res, Op0, Op1);
}

MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildBrIndirect(unsigned Tgt) {
  assert(MRI->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}

MachineInstrBuilder MachineIRBuilder::buildCopy(unsigned Res, unsigned Op) {
  assert(MRI->getType(Res) == LLT() || MRI->getType(Op) == LLT() ||
         MRI->getType(Res) == MRI->getType(Op));
  return buildInstr(TargetOpcode::COPY).addDef(Res).addUse(Op);
}

MachineInstrBuilder MachineIRBuilder::buildConstant(unsigned Res,
                                                    const ConstantInt &Val) {
  LLT Ty = MRI->getType(Res);

  assert((Ty.isScalar() || Ty.isPointer()) && "invalid operand type");

  const ConstantInt *NewVal = &Val;
  if (Ty.getSizeInBits() != Val.getBitWidth())
    NewVal = ConstantInt::get(MF->getFunction().getContext(),
                              Val.getValue().sextOrTrunc(Ty.getSizeInBits()));

  return buildInstr(TargetOpcode::G_CONSTANT).addDef(Res).addCImm(NewVal);
}

MachineInstrBuilder MachineIRBuilder::buildConstant(unsigned Res,
                                                    int64_t Val) {
  auto IntN = IntegerType::get(MF->getFunction().getContext(),
                               MRI->getType(Res).getSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(unsigned Res,
                                                     const ConstantFP &Val) {
  assert(MRI->getType(Res).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_FCONSTANT).addDef(Res).addFPImm(&Val);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(unsigned Res, double Val) {
  LLT DstTy = MRI->getType(Res);
  auto &Ctx = MF->getFunction().getContext();
  auto *CFP =
      ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getSizeInBits()));
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildBrCond(unsigned Tst,
                                                  MachineBasicBlock &Dest) {
  assert(MRI->getType(Tst).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildLoad(unsigned Res, unsigned Addr,
                                                MachineMemOperand &MMO) {
  assert(MRI->getType(Res).isValid() && "invalid operand type");
  assert(MRI->getType(Addr).isPointer() && "invalid operand type");

  return buildInstr(TargetOpcode::G_LOAD)
      .addDef(Res)
      .addUse(Addr)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildStore(unsigned Val, unsigned Addr,
                                                 MachineMemOperand &MMO) {
  assert(MRI->getType(Val).isValid() && "invalid operand type");
  assert(MRI->getType(Addr).isPointer() && "invalid operand type");

  return buildInstr(TargetOpcode::G_STORE)
      .addUse(Val)
      .addUse(Addr)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildUAdde(unsigned Res,
                                                 unsigned CarryOut,
                                                 unsigned Op0, unsigned Op1,
                                                 unsigned CarryIn) {
  assert(MRI->getType(Res).isScalar() && "invalid operand type");
  assert(MRI->getType(Res) == MRI->getType(Op0) &&
         MRI->getType(Res) == MRI->getType(Op1) && "type mismatch");
  assert(MRI->getType(CarryOut).isScalar() && "invalid operand type");
  assert(MRI->getType(CarryOut) == MRI->getType(CarryIn) && "type mismatch");

  return buildInstr(TargetOpcode::G_UADDE)
      .addDef(Res)
      .addDef(CarryOut)
      .addUse(Op0)
      .addUse(Op1)
      .addUse(CarryIn);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExt(unsigned Res, unsigned Op) {
  validateTruncExt(Res, Op, true);
  return buildInstr(TargetOpcode::G_ANYEXT).addDef(Res).addUse(Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExt(unsigned Res, unsigned Op) {
  validateTruncExt(Res, Op, true);
  return buildInstr(TargetOpcode::G_SEXT).addDef(Res).addUse(Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExt(unsigned Res, unsigned Op) {
  validateTruncExt(Res, Op, true);
  return buildInstr(TargetOpcode::G_ZEXT).addDef(Res).addUse(Op);
}

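// Extend or truncate Op into Res depending on their relative sizes: use
// ExtOpc when widening, G_TRUNC when narrowing, and a plain COPY when the
// types already match.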
MachineInstrBuilder
MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc, unsigned Res, unsigned Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(MRI->getType(Res).isScalar() || MRI->getType(Res).isVector());
  assert(MRI->getType(Res).isScalar() == MRI->getType(Op).isScalar());

  unsigned Opcode = TargetOpcode::COPY;
  if (MRI->getType(Res).getSizeInBits() > MRI->getType(Op).getSizeInBits())
    Opcode = ExtOpc;
  else if (MRI->getType(Res).getSizeInBits() < MRI->getType(Op).getSizeInBits())
    Opcode = TargetOpcode::G_TRUNC;
  else
    assert(MRI->getType(Res) == MRI->getType(Op));

  return buildInstr(Opcode).addDef(Res).addUse(Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(unsigned Res,
                                                       unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(unsigned Res,
                                                       unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(unsigned Res,
                                                         unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}

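// Bitwise-preserving cast between same-sized types: pointer <-> scalar casts
// use G_PTRTOINT / G_INTTOPTR, identical types degrade to a COPY, and
// everything else becomes a G_BITCAST. Casts between differently-typed
// pointers are not supported.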
MachineInstrBuilder MachineIRBuilder::buildCast(unsigned Dst, unsigned Src) {
  LLT SrcTy = MRI->getType(Src);
  LLT DstTy = MRI->getType(Dst);
  if (SrcTy == DstTy)
    return buildCopy(Dst, Src);

  unsigned Opcode;
  if (SrcTy.isPointer() && DstTy.isScalar())
    Opcode = TargetOpcode::G_PTRTOINT;
  else if (DstTy.isPointer() && SrcTy.isScalar())
    Opcode = TargetOpcode::G_INTTOPTR;
  else {
    assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
    Opcode = TargetOpcode::G_BITCAST;
  }

  return buildInstr(Opcode).addDef(Dst).addUse(Src);
}

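// Extract a Res-sized slice of Src starting at the given bit index. A
// full-width extract at index 0 is emitted as a cast instead of a G_EXTRACT.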
MachineInstrBuilder MachineIRBuilder::buildExtract(unsigned Res, unsigned Src,
                                                   uint64_t Index) {
#ifndef NDEBUG
  assert(MRI->getType(Src).isValid() && "invalid operand type");
  assert(MRI->getType(Res).isValid() && "invalid operand type");
  assert(Index + MRI->getType(Res).getSizeInBits() <=
             MRI->getType(Src).getSizeInBits() &&
         "extracting off end of register");
#endif

  if (MRI->getType(Res).getSizeInBits() == MRI->getType(Src).getSizeInBits()) {
    assert(Index == 0 && "extraction past the end of a register");
    return buildCast(Res, Src);
  }

  return buildInstr(TargetOpcode::G_EXTRACT)
      .addDef(Res)
      .addUse(Src)
      .addImm(Index);
}

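// Assemble Res from the given operands placed at the given bit offsets. When
// the operands are homogeneous, contiguous, and cover the whole result this
// is emitted as a single G_MERGE_VALUES; otherwise it is lowered to a
// G_IMPLICIT_DEF followed by a chain of G_INSERTs.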
void MachineIRBuilder::buildSequence(unsigned Res, ArrayRef<unsigned> Ops,
                                     ArrayRef<uint64_t> Indices) {
#ifndef NDEBUG
  assert(Ops.size() == Indices.size() && "incompatible args");
  assert(!Ops.empty() && "invalid trivial sequence");
  assert(std::is_sorted(Indices.begin(), Indices.end()) &&
         "sequence offsets must be in ascending order");

  assert(MRI->getType(Res).isValid() && "invalid operand type");
  for (auto Op : Ops)
    assert(MRI->getType(Op).isValid() && "invalid operand type");
#endif

  LLT ResTy = MRI->getType(Res);
  LLT OpTy = MRI->getType(Ops[0]);
  unsigned OpSize = OpTy.getSizeInBits();
  bool MaybeMerge = true;
  for (unsigned i = 0; i < Ops.size(); ++i) {
    if (MRI->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
      MaybeMerge = false;
      break;
    }
  }

  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
    buildMerge(Res, Ops);
    return;
  }

  unsigned ResIn = MRI->createGenericVirtualRegister(ResTy);
  buildUndef(ResIn);

  for (unsigned i = 0; i < Ops.size(); ++i) {
    unsigned ResOut =
        i + 1 == Ops.size() ? Res : MRI->createGenericVirtualRegister(ResTy);
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
    ResIn = ResOut;
  }
}

MachineInstrBuilder MachineIRBuilder::buildUndef(unsigned Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF).addDef(Res);
}

MachineInstrBuilder MachineIRBuilder::buildMerge(unsigned Res,
                                                 ArrayRef<unsigned> Ops) {
#ifndef NDEBUG
  assert(!Ops.empty() && "invalid trivial sequence");
  LLT Ty = MRI->getType(Ops[0]);
  for (auto Reg : Ops)
    assert(MRI->getType(Reg) == Ty && "type mismatch in input list");
  assert(Ops.size() * MRI->getType(Ops[0]).getSizeInBits() ==
             MRI->getType(Res).getSizeInBits() &&
         "input operands do not cover output register");
#endif

  if (Ops.size() == 1)
    return buildCast(Res, Ops[0]);

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_MERGE_VALUES);
  MIB.addDef(Res);
  for (unsigned i = 0; i < Ops.size(); ++i)
    MIB.addUse(Ops[i]);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<unsigned> Res,
                                                   unsigned Op) {
#ifndef NDEBUG
  assert(!Res.empty() && "invalid trivial sequence");
  LLT Ty = MRI->getType(Res[0]);
  for (auto Reg : Res)
    assert(MRI->getType(Reg) == Ty && "type mismatch in output list");
  assert(Res.size() * MRI->getType(Res[0]).getSizeInBits() ==
             MRI->getType(Op).getSizeInBits() &&
         "output operands do not cover input register");
#endif

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_UNMERGE_VALUES);
  for (unsigned i = 0; i < Res.size(); ++i)
    MIB.addDef(Res[i]);
  MIB.addUse(Op);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildInsert(unsigned Res, unsigned Src,
                                                  unsigned Op, unsigned Index) {
  assert(Index + MRI->getType(Op).getSizeInBits() <=
             MRI->getType(Res).getSizeInBits() &&
         "insertion past the end of a register");

  if (MRI->getType(Res).getSizeInBits() == MRI->getType(Op).getSizeInBits()) {
    return buildCast(Res, Op);
  }

  return buildInstr(TargetOpcode::G_INSERT)
      .addDef(Res)
      .addUse(Src)
      .addUse(Op)
      .addImm(Index);
}

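// Build a call to the given intrinsic, selecting G_INTRINSIC_W_SIDE_EFFECTS
// or G_INTRINSIC based on HasSideEffects. A result operand is only added when
// Res is a valid register.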
MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     unsigned Res,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  if (Res)
    MIB.addDef(Res);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildTrunc(unsigned Res, unsigned Op) {
  validateTruncExt(Res, Op, false);
  return buildInstr(TargetOpcode::G_TRUNC).addDef(Res).addUse(Op);
}

MachineInstrBuilder MachineIRBuilder::buildFPTrunc(unsigned Res, unsigned Op) {
  validateTruncExt(Res, Op, false);
  return buildInstr(TargetOpcode::G_FPTRUNC).addDef(Res).addUse(Op);
}

MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
                                                unsigned Res, unsigned Op0,
                                                unsigned Op1) {
#ifndef NDEBUG
  assert(MRI->getType(Op0) == MRI->getType(Op1) && "type mismatch");
  assert(CmpInst::isIntPredicate(Pred) && "invalid predicate");
  if (MRI->getType(Op0).isScalar() || MRI->getType(Op0).isPointer())
    assert(MRI->getType(Res).isScalar() && "type mismatch");
  else
    assert(MRI->getType(Res).isVector() &&
           MRI->getType(Res).getNumElements() ==
               MRI->getType(Op0).getNumElements() &&
           "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ICMP)
      .addDef(Res)
      .addPredicate(Pred)
      .addUse(Op0)
      .addUse(Op1);
}

578 
579 MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
580                                                 unsigned Res, unsigned Op0,
581                                                 unsigned Op1) {
582 #ifndef NDEBUG
583   assert((MRI->getType(Op0).isScalar() || MRI->getType(Op0).isVector()) &&
584          "invalid operand type");
585   assert(MRI->getType(Op0) == MRI->getType(Op1) && "type mismatch");
586   assert(CmpInst::isFPPredicate(Pred) && "invalid predicate");
587   if (MRI->getType(Op0).isScalar())
588     assert(MRI->getType(Res).isScalar() && "type mismatch");
589   else
590     assert(MRI->getType(Res).isVector() &&
591            MRI->getType(Res).getNumElements() ==
592                MRI->getType(Op0).getNumElements() &&
593            "type mismatch");
594 #endif
595 
596   return buildInstr(TargetOpcode::G_FCMP)
597       .addDef(Res)
598       .addPredicate(Pred)
599       .addUse(Op0)
600       .addUse(Op1);
601 }
602 
603 MachineInstrBuilder MachineIRBuilder::buildSelect(unsigned Res, unsigned Tst,
604                                                   unsigned Op0, unsigned Op1) {
605 #ifndef NDEBUG
606   LLT ResTy = MRI->getType(Res);
607   assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
608          "invalid operand type");
609   assert(ResTy == MRI->getType(Op0) && ResTy == MRI->getType(Op1) &&
610          "type mismatch");
611   if (ResTy.isScalar() || ResTy.isPointer())
612     assert(MRI->getType(Tst).isScalar() && "type mismatch");
613   else
614     assert((MRI->getType(Tst).isScalar() ||
615             (MRI->getType(Tst).isVector() &&
616              MRI->getType(Tst).getNumElements() ==
617                  MRI->getType(Op0).getNumElements())) &&
618            "type mismatch");
619 #endif
620 
621   return buildInstr(TargetOpcode::G_SELECT)
622       .addDef(Res)
623       .addUse(Tst)
624       .addUse(Op0)
625       .addUse(Op1);
626 }
627 
MachineInstrBuilder MachineIRBuilder::buildInsertVectorElement(unsigned Res,
                                                               unsigned Val,
                                                               unsigned Elt,
                                                               unsigned Idx) {
#ifndef NDEBUG
  LLT ResTy = MRI->getType(Res);
  LLT ValTy = MRI->getType(Val);
  LLT EltTy = MRI->getType(Elt);
  LLT IdxTy = MRI->getType(Idx);
  assert(ResTy.isVector() && ValTy.isVector() && "invalid operand type");
  assert(IdxTy.isScalar() && "invalid operand type");
  assert(ResTy.getNumElements() == ValTy.getNumElements() && "type mismatch");
  assert(ResTy.getElementType() == EltTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT)
      .addDef(Res)
      .addUse(Val)
      .addUse(Elt)
      .addUse(Idx);
}

MachineInstrBuilder MachineIRBuilder::buildExtractVectorElement(unsigned Res,
                                                                unsigned Val,
                                                                unsigned Idx) {
#ifndef NDEBUG
  LLT ResTy = MRI->getType(Res);
  LLT ValTy = MRI->getType(Val);
  LLT IdxTy = MRI->getType(Idx);
  assert(ValTy.isVector() && "invalid operand type");
  assert((ResTy.isScalar() || ResTy.isPointer()) && "invalid operand type");
  assert(IdxTy.isScalar() && "invalid operand type");
  assert(ValTy.getElementType() == ResTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT)
      .addDef(Res)
      .addUse(Val)
      .addUse(Idx);
}

668 
669 MachineInstrBuilder
670 MachineIRBuilder::buildAtomicCmpXchg(unsigned OldValRes, unsigned Addr,
671                                      unsigned CmpVal, unsigned NewVal,
672                                      MachineMemOperand &MMO) {
673 #ifndef NDEBUG
674   LLT OldValResTy = MRI->getType(OldValRes);
675   LLT AddrTy = MRI->getType(Addr);
676   LLT CmpValTy = MRI->getType(CmpVal);
677   LLT NewValTy = MRI->getType(NewVal);
678   assert(OldValResTy.isScalar() && "invalid operand type");
679   assert(AddrTy.isPointer() && "invalid operand type");
680   assert(CmpValTy.isValid() && "invalid operand type");
681   assert(NewValTy.isValid() && "invalid operand type");
682   assert(OldValResTy == CmpValTy && "type mismatch");
683   assert(OldValResTy == NewValTy && "type mismatch");
684 #endif
685 
686   return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
687       .addDef(OldValRes)
688       .addUse(Addr)
689       .addUse(CmpVal)
690       .addUse(NewVal)
691       .addMemOperand(&MMO);
692 }
693 
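// Debug-only checker shared by the extend/truncate builders: vector casts
// must preserve the element count, extensions must strictly widen and
// truncations strictly narrow.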
void MachineIRBuilder::validateTruncExt(unsigned Dst, unsigned Src,
                                        bool IsExtend) {
#ifndef NDEBUG
  LLT SrcTy = MRI->getType(Src);
  LLT DstTy = MRI->getType(Dst);

  if (DstTy.isVector()) {
    assert(SrcTy.isVector() &&
           "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");

  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}