//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugInfo.h"

using namespace llvm;

void MachineIRBuilderBase::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MBB = nullptr;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  State.DL = DebugLoc();
  State.II = MachineBasicBlock::iterator();
  State.InsertedInstr = nullptr;
}

void MachineIRBuilderBase::setMBB(MachineBasicBlock &MBB) {
  State.MBB = &MBB;
  State.II = MBB.end();
  assert(&getMF() == MBB.getParent() &&
         "Basic block is in a different function");
}

void MachineIRBuilderBase::setInstr(MachineInstr &MI) {
  assert(MI.getParent() && "Instruction is not part of a basic block");
  setMBB(*MI.getParent());
  State.II = MI.getIterator();
}

void MachineIRBuilderBase::setInsertPt(MachineBasicBlock &MBB,
                                       MachineBasicBlock::iterator II) {
  assert(MBB.getParent() == &getMF() &&
         "Basic block is in a different function");
  State.MBB = &MBB;
  State.II = II;
}

void MachineIRBuilderBase::recordInsertions(
    std::function<void(MachineInstr *)> Inserted) {
  State.InsertedInstr = std::move(Inserted);
}

void MachineIRBuilderBase::stopRecordingInsertions() {
  State.InsertedInstr = nullptr;
}

//------------------------------------------------------------------------------
// Build instruction variants.
//------------------------------------------------------------------------------
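//
// Typical use of these helpers (a minimal sketch, not lifted from real code;
// MF, MBB, CstReg, DstReg, AddrReg and MMO are assumed to exist): position the
// builder first, then chain the build* calls:
//
//   MachineIRBuilder MIRBuilder;
//   MIRBuilder.setMF(MF);
//   MIRBuilder.setMBB(MBB);                     // insert at the end of MBB
//   MIRBuilder.buildConstant(CstReg, 42);       // G_CONSTANT
//   MIRBuilder.buildLoad(DstReg, AddrReg, MMO); // G_LOAD
//
// Every build* method inserts at the current insertion point and returns a
// MachineInstrBuilder that can be used to attach further operands or flags.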

MachineInstrBuilder MachineIRBuilderBase::buildInstr(unsigned Opcode) {
  return insertInstr(buildInstrNoInsert(Opcode));
}

MachineInstrBuilder MachineIRBuilderBase::buildInstrNoInsert(unsigned Opcode) {
  MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
  return MIB;
}

MachineInstrBuilder MachineIRBuilderBase::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  if (State.InsertedInstr)
    State.InsertedInstr(MIB);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilderBase::buildDirectDbgValue(unsigned Reg, const MDNode *Variable,
                                          const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}

MachineInstrBuilder MachineIRBuilderBase::buildIndirectDbgValue(
    unsigned Reg, const MDNode *Variable, const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}

MachineInstrBuilder
MachineIRBuilderBase::buildFIDbgValue(int FI, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}

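// Emit a DBG_VALUE that describes a variable as a constant. Integer constants
// that fit in 64 bits become immediate operands, wider integers stay as CImm,
// and floating-point constants become FPImm; any other constant is dropped by
// emitting a $noreg location.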
MachineInstrBuilder MachineIRBuilderBase::buildConstDbgValue(
    const Constant &C, const MDNode *Variable, const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert %noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(0U);
  }

  return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilderBase::buildFrameIndex(unsigned Res,
                                                          int Idx) {
  assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
  return buildInstr(TargetOpcode::G_FRAME_INDEX)
      .addDef(Res)
      .addFrameIndex(Idx);
}

MachineInstrBuilder
MachineIRBuilderBase::buildGlobalValue(unsigned Res, const GlobalValue *GV) {
  assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
  assert(getMRI()->getType(Res).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");

  return buildInstr(TargetOpcode::G_GLOBAL_VALUE)
      .addDef(Res)
      .addGlobalAddress(GV);
}

void MachineIRBuilderBase::validateBinaryOp(unsigned Res, unsigned Op0,
                                            unsigned Op1) {
  assert((getMRI()->getType(Res).isScalar() ||
          getMRI()->getType(Res).isVector()) &&
         "invalid operand type");
  assert(getMRI()->getType(Res) == getMRI()->getType(Op0) &&
         getMRI()->getType(Res) == getMRI()->getType(Op1) && "type mismatch");
}

MachineInstrBuilder MachineIRBuilderBase::buildGEP(unsigned Res, unsigned Op0,
                                                   unsigned Op1) {
  assert(getMRI()->getType(Res).isPointer() &&
         getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");
  assert(getMRI()->getType(Op1).isScalar() && "invalid offset type");

  return buildInstr(TargetOpcode::G_GEP)
      .addDef(Res)
      .addUse(Op0)
      .addUse(Op1);
}

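// Materialize an address formed by adding a constant offset to Op0. Res is an
// out-parameter: it must be passed in as 0 and receives the result register.
// A zero offset emits no instructions at all; Res is simply set to Op0 and
// None is returned. Otherwise a G_CONSTANT for the offset and a G_GEP are
// emitted.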
Optional<MachineInstrBuilder>
MachineIRBuilderBase::materializeGEP(unsigned &Res, unsigned Op0,
                                     const LLT &ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");

  if (Value == 0) {
    Res = Op0;
    return None;
  }

  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  unsigned TmpReg = getMRI()->createGenericVirtualRegister(ValueTy);

  buildConstant(TmpReg, Value);
  return buildGEP(Res, Op0, TmpReg);
}

MachineInstrBuilder MachineIRBuilderBase::buildPtrMask(unsigned Res,
                                                       unsigned Op0,
                                                       uint32_t NumBits) {
  assert(getMRI()->getType(Res).isPointer() &&
         getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");

  return buildInstr(TargetOpcode::G_PTR_MASK)
      .addDef(Res)
      .addUse(Op0)
      .addImm(NumBits);
}

MachineInstrBuilder MachineIRBuilderBase::buildBr(MachineBasicBlock &Dest) {
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilderBase::buildBrIndirect(unsigned Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}

MachineInstrBuilder MachineIRBuilderBase::buildCopy(unsigned Res, unsigned Op) {
  assert(getMRI()->getType(Res) == LLT() || getMRI()->getType(Op) == LLT() ||
         getMRI()->getType(Res) == getMRI()->getType(Op));
  return buildInstr(TargetOpcode::COPY).addDef(Res).addUse(Op);
}

MachineInstrBuilder
MachineIRBuilderBase::buildConstant(unsigned Res, const ConstantInt &Val) {
  LLT Ty = getMRI()->getType(Res);

  assert((Ty.isScalar() || Ty.isPointer()) && "invalid operand type");

  const ConstantInt *NewVal = &Val;
  if (Ty.getSizeInBits() != Val.getBitWidth())
    NewVal = ConstantInt::get(getMF().getFunction().getContext(),
                              Val.getValue().sextOrTrunc(Ty.getSizeInBits()));

  return buildInstr(TargetOpcode::G_CONSTANT).addDef(Res).addCImm(NewVal);
}

MachineInstrBuilder MachineIRBuilderBase::buildConstant(unsigned Res,
                                                        int64_t Val) {
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
                               getMRI()->getType(Res).getSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder
MachineIRBuilderBase::buildFConstant(unsigned Res, const ConstantFP &Val) {
  assert(getMRI()->getType(Res).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_FCONSTANT).addDef(Res).addFPImm(&Val);
}

MachineInstrBuilder MachineIRBuilderBase::buildFConstant(unsigned Res,
                                                         double Val) {
  LLT DstTy = getMRI()->getType(Res);
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP =
      ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getSizeInBits()));
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilderBase::buildBrCond(unsigned Tst,
                                                      MachineBasicBlock &Dest) {
  assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilderBase::buildLoad(unsigned Res, unsigned Addr,
                                                    MachineMemOperand &MMO) {
  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");

  return buildInstr(TargetOpcode::G_LOAD)
      .addDef(Res)
      .addUse(Addr)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilderBase::buildStore(unsigned Val,
                                                     unsigned Addr,
                                                     MachineMemOperand &MMO) {
  assert(getMRI()->getType(Val).isValid() && "invalid operand type");
  assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");

  return buildInstr(TargetOpcode::G_STORE)
      .addUse(Val)
      .addUse(Addr)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilderBase::buildUAdde(unsigned Res,
                                                     unsigned CarryOut,
                                                     unsigned Op0, unsigned Op1,
                                                     unsigned CarryIn) {
  assert(getMRI()->getType(Res).isScalar() && "invalid operand type");
  assert(getMRI()->getType(Res) == getMRI()->getType(Op0) &&
         getMRI()->getType(Res) == getMRI()->getType(Op1) && "type mismatch");
  assert(getMRI()->getType(CarryOut).isScalar() && "invalid operand type");
  assert(getMRI()->getType(CarryOut) == getMRI()->getType(CarryIn) &&
         "type mismatch");

  return buildInstr(TargetOpcode::G_UADDE)
      .addDef(Res)
      .addDef(CarryOut)
      .addUse(Op0)
      .addUse(Op1)
      .addUse(CarryIn);
}

MachineInstrBuilder MachineIRBuilderBase::buildAnyExt(unsigned Res,
                                                      unsigned Op) {
  validateTruncExt(Res, Op, true);
  return buildInstr(TargetOpcode::G_ANYEXT).addDef(Res).addUse(Op);
}

MachineInstrBuilder MachineIRBuilderBase::buildSExt(unsigned Res, unsigned Op) {
  validateTruncExt(Res, Op, true);
  return buildInstr(TargetOpcode::G_SEXT).addDef(Res).addUse(Op);
}

MachineInstrBuilder MachineIRBuilderBase::buildZExt(unsigned Res, unsigned Op) {
  validateTruncExt(Res, Op, true);
  return buildInstr(TargetOpcode::G_ZEXT).addDef(Res).addUse(Op);
}

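// Pick between ExtOpc, G_TRUNC and COPY purely by comparing the bit widths of
// Res and Op: wider destinations extend, narrower destinations truncate, and
// equal widths (which must then have identical types) simply copy.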
MachineInstrBuilder MachineIRBuilderBase::buildExtOrTrunc(unsigned ExtOpc,
                                                          unsigned Res,
                                                          unsigned Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(getMRI()->getType(Res).isScalar() ||
         getMRI()->getType(Res).isVector());
  assert(getMRI()->getType(Res).isScalar() == getMRI()->getType(Op).isScalar());

  unsigned Opcode = TargetOpcode::COPY;
  if (getMRI()->getType(Res).getSizeInBits() >
      getMRI()->getType(Op).getSizeInBits())
    Opcode = ExtOpc;
  else if (getMRI()->getType(Res).getSizeInBits() <
           getMRI()->getType(Op).getSizeInBits())
    Opcode = TargetOpcode::G_TRUNC;
  else
    assert(getMRI()->getType(Res) == getMRI()->getType(Op));

  return buildInstr(Opcode).addDef(Res).addUse(Op);
}

MachineInstrBuilder MachineIRBuilderBase::buildSExtOrTrunc(unsigned Res,
                                                           unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilderBase::buildZExtOrTrunc(unsigned Res,
                                                           unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilderBase::buildAnyExtOrTrunc(unsigned Res,
                                                             unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}

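// Build a cast between Src and Dst, choosing G_PTRTOINT, G_INTTOPTR or
// G_BITCAST from whether the source and destination types are pointers.
// Identical types degenerate to a plain COPY; pointer-to-pointer casts between
// different types are not supported yet (no G_ADDRCAST).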
MachineInstrBuilder MachineIRBuilderBase::buildCast(unsigned Dst,
                                                    unsigned Src) {
  LLT SrcTy = getMRI()->getType(Src);
  LLT DstTy = getMRI()->getType(Dst);
  if (SrcTy == DstTy)
    return buildCopy(Dst, Src);

  unsigned Opcode;
  if (SrcTy.isPointer() && DstTy.isScalar())
    Opcode = TargetOpcode::G_PTRTOINT;
  else if (DstTy.isPointer() && SrcTy.isScalar())
    Opcode = TargetOpcode::G_INTTOPTR;
  else {
    assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
    Opcode = TargetOpcode::G_BITCAST;
  }

  return buildInstr(Opcode).addDef(Dst).addUse(Src);
}

MachineInstrBuilder
MachineIRBuilderBase::buildExtract(unsigned Res, unsigned Src, uint64_t Index) {
#ifndef NDEBUG
  assert(getMRI()->getType(Src).isValid() && "invalid operand type");
  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  assert(Index + getMRI()->getType(Res).getSizeInBits() <=
             getMRI()->getType(Src).getSizeInBits() &&
         "extracting off end of register");
#endif

  if (getMRI()->getType(Res).getSizeInBits() ==
      getMRI()->getType(Src).getSizeInBits()) {
    assert(Index == 0 && "extraction past the end of a register");
    return buildCast(Res, Src);
  }

  return buildInstr(TargetOpcode::G_EXTRACT)
      .addDef(Res)
      .addUse(Src)
      .addImm(Index);
}

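// Assemble Res from the given pieces at the given bit offsets. When the pieces
// are uniformly typed, contiguous and exactly cover Res, this emits a single
// G_MERGE_VALUES; otherwise it starts from G_IMPLICIT_DEF and chains one
// G_INSERT per piece.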
void MachineIRBuilderBase::buildSequence(unsigned Res, ArrayRef<unsigned> Ops,
                                         ArrayRef<uint64_t> Indices) {
#ifndef NDEBUG
  assert(Ops.size() == Indices.size() && "incompatible args");
  assert(!Ops.empty() && "invalid trivial sequence");
  assert(std::is_sorted(Indices.begin(), Indices.end()) &&
         "sequence offsets must be in ascending order");

  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  for (auto Op : Ops)
    assert(getMRI()->getType(Op).isValid() && "invalid operand type");
#endif

  LLT ResTy = getMRI()->getType(Res);
  LLT OpTy = getMRI()->getType(Ops[0]);
  unsigned OpSize = OpTy.getSizeInBits();
  bool MaybeMerge = true;
  for (unsigned i = 0; i < Ops.size(); ++i) {
    if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
      MaybeMerge = false;
      break;
    }
  }

  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
    buildMerge(Res, Ops);
    return;
  }

  unsigned ResIn = getMRI()->createGenericVirtualRegister(ResTy);
  buildUndef(ResIn);

  for (unsigned i = 0; i < Ops.size(); ++i) {
    unsigned ResOut = i + 1 == Ops.size()
                          ? Res
                          : getMRI()->createGenericVirtualRegister(ResTy);
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
    ResIn = ResOut;
  }
}

MachineInstrBuilder MachineIRBuilderBase::buildUndef(unsigned Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF).addDef(Res);
}

MachineInstrBuilder MachineIRBuilderBase::buildMerge(unsigned Res,
                                                     ArrayRef<unsigned> Ops) {

#ifndef NDEBUG
  assert(!Ops.empty() && "invalid trivial sequence");
  LLT Ty = getMRI()->getType(Ops[0]);
  for (auto Reg : Ops)
    assert(getMRI()->getType(Reg) == Ty && "type mismatch in input list");
  assert(Ops.size() * getMRI()->getType(Ops[0]).getSizeInBits() ==
             getMRI()->getType(Res).getSizeInBits() &&
         "input operands do not cover output register");
#endif

  if (Ops.size() == 1)
    return buildCast(Res, Ops[0]);

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_MERGE_VALUES);
  MIB.addDef(Res);
  for (unsigned i = 0; i < Ops.size(); ++i)
    MIB.addUse(Ops[i]);
  return MIB;
}

MachineInstrBuilder MachineIRBuilderBase::buildUnmerge(ArrayRef<unsigned> Res,
                                                       unsigned Op) {

#ifndef NDEBUG
  assert(!Res.empty() && "invalid trivial sequence");
  LLT Ty = getMRI()->getType(Res[0]);
  for (auto Reg : Res)
    assert(getMRI()->getType(Reg) == Ty && "type mismatch in input list");
  assert(Res.size() * getMRI()->getType(Res[0]).getSizeInBits() ==
             getMRI()->getType(Op).getSizeInBits() &&
         "output operands do not cover input register");
#endif

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_UNMERGE_VALUES);
  for (unsigned i = 0; i < Res.size(); ++i)
    MIB.addDef(Res[i]);
  MIB.addUse(Op);
  return MIB;
}

MachineInstrBuilder MachineIRBuilderBase::buildInsert(unsigned Res,
                                                      unsigned Src, unsigned Op,
                                                      unsigned Index) {
  assert(Index + getMRI()->getType(Op).getSizeInBits() <=
             getMRI()->getType(Res).getSizeInBits() &&
         "insertion past the end of a register");

  if (getMRI()->getType(Res).getSizeInBits() ==
      getMRI()->getType(Op).getSizeInBits()) {
    return buildCast(Res, Op);
  }

  return buildInstr(TargetOpcode::G_INSERT)
      .addDef(Res)
      .addUse(Src)
      .addUse(Op)
      .addImm(Index);
}

MachineInstrBuilder MachineIRBuilderBase::buildIntrinsic(Intrinsic::ID ID,
                                                         unsigned Res,
                                                         bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  if (Res)
    MIB.addDef(Res);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilderBase::buildTrunc(unsigned Res,
                                                     unsigned Op) {
  validateTruncExt(Res, Op, false);
  return buildInstr(TargetOpcode::G_TRUNC).addDef(Res).addUse(Op);
}

MachineInstrBuilder MachineIRBuilderBase::buildFPTrunc(unsigned Res,
                                                       unsigned Op) {
  validateTruncExt(Res, Op, false);
  return buildInstr(TargetOpcode::G_FPTRUNC).addDef(Res).addUse(Op);
}

MachineInstrBuilder MachineIRBuilderBase::buildICmp(CmpInst::Predicate Pred,
                                                    unsigned Res, unsigned Op0,
                                                    unsigned Op1) {
#ifndef NDEBUG
  assert(getMRI()->getType(Op0) == getMRI()->getType(Op1) && "type mismatch");
  assert(CmpInst::isIntPredicate(Pred) && "invalid predicate");
  if (getMRI()->getType(Op0).isScalar() || getMRI()->getType(Op0).isPointer())
    assert(getMRI()->getType(Res).isScalar() && "type mismatch");
  else
    assert(getMRI()->getType(Res).isVector() &&
           getMRI()->getType(Res).getNumElements() ==
               getMRI()->getType(Op0).getNumElements() &&
           "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ICMP)
      .addDef(Res)
      .addPredicate(Pred)
      .addUse(Op0)
      .addUse(Op1);
}

MachineInstrBuilder MachineIRBuilderBase::buildFCmp(CmpInst::Predicate Pred,
                                                    unsigned Res, unsigned Op0,
                                                    unsigned Op1) {
#ifndef NDEBUG
  assert((getMRI()->getType(Op0).isScalar() ||
          getMRI()->getType(Op0).isVector()) &&
         "invalid operand type");
  assert(getMRI()->getType(Op0) == getMRI()->getType(Op1) && "type mismatch");
  assert(CmpInst::isFPPredicate(Pred) && "invalid predicate");
  if (getMRI()->getType(Op0).isScalar())
    assert(getMRI()->getType(Res).isScalar() && "type mismatch");
  else
    assert(getMRI()->getType(Res).isVector() &&
           getMRI()->getType(Res).getNumElements() ==
               getMRI()->getType(Op0).getNumElements() &&
           "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_FCMP)
      .addDef(Res)
      .addPredicate(Pred)
      .addUse(Op0)
      .addUse(Op1);
}

MachineInstrBuilder MachineIRBuilderBase::buildSelect(unsigned Res,
                                                      unsigned Tst,
                                                      unsigned Op0,
                                                      unsigned Op1) {
#ifndef NDEBUG
  LLT ResTy = getMRI()->getType(Res);
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert(ResTy == getMRI()->getType(Op0) && ResTy == getMRI()->getType(Op1) &&
         "type mismatch");
  if (ResTy.isScalar() || ResTy.isPointer())
    assert(getMRI()->getType(Tst).isScalar() && "type mismatch");
  else
    assert((getMRI()->getType(Tst).isScalar() ||
            (getMRI()->getType(Tst).isVector() &&
             getMRI()->getType(Tst).getNumElements() ==
                 getMRI()->getType(Op0).getNumElements())) &&
           "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_SELECT)
      .addDef(Res)
      .addUse(Tst)
      .addUse(Op0)
      .addUse(Op1);
}

MachineInstrBuilder
MachineIRBuilderBase::buildInsertVectorElement(unsigned Res, unsigned Val,
                                               unsigned Elt, unsigned Idx) {
#ifndef NDEBUG
  LLT ResTy = getMRI()->getType(Res);
  LLT ValTy = getMRI()->getType(Val);
  LLT EltTy = getMRI()->getType(Elt);
  LLT IdxTy = getMRI()->getType(Idx);
  assert(ResTy.isVector() && ValTy.isVector() && "invalid operand type");
  assert(IdxTy.isScalar() && "invalid operand type");
  assert(ResTy.getNumElements() == ValTy.getNumElements() && "type mismatch");
  assert(ResTy.getElementType() == EltTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT)
      .addDef(Res)
      .addUse(Val)
      .addUse(Elt)
      .addUse(Idx);
}

MachineInstrBuilder
MachineIRBuilderBase::buildExtractVectorElement(unsigned Res, unsigned Val,
                                                unsigned Idx) {
#ifndef NDEBUG
  LLT ResTy = getMRI()->getType(Res);
  LLT ValTy = getMRI()->getType(Val);
  LLT IdxTy = getMRI()->getType(Idx);
  assert(ValTy.isVector() && "invalid operand type");
  assert((ResTy.isScalar() || ResTy.isPointer()) && "invalid operand type");
  assert(IdxTy.isScalar() && "invalid operand type");
  assert(ValTy.getElementType() == ResTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT)
      .addDef(Res)
      .addUse(Val)
      .addUse(Idx);
}

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicCmpXchg(unsigned OldValRes, unsigned Addr,
                                         unsigned CmpVal, unsigned NewVal,
                                         MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

void MachineIRBuilderBase::validateTruncExt(unsigned Dst, unsigned Src,
                                            bool IsExtend) {
#ifndef NDEBUG
  LLT SrcTy = getMRI()->getType(Src);
  LLT DstTy = getMRI()->getType(Dst);

  if (DstTy.isVector()) {
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");

  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}