//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugInfo.h"

using namespace llvm;

void MachineIRBuilder::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MBB = nullptr;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  State.DL = DebugLoc();
  State.II = MachineBasicBlock::iterator();
  State.Observer = nullptr;
}

void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) {
  State.MBB = &MBB;
  State.II = MBB.end();
  assert(&getMF() == MBB.getParent() &&
         "Basic block is in a different function");
}

void MachineIRBuilder::setInstr(MachineInstr &MI) {
  assert(MI.getParent() && "Instruction is not part of a basic block");
  setMBB(*MI.getParent());
  State.II = MI.getIterator();
}

void MachineIRBuilder::setCSEInfo(GISelCSEInfo *Info) { State.CSEInfo = Info; }

void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator II) {
  assert(MBB.getParent() == &getMF() &&
         "Basic block is in a different function");
  State.MBB = &MBB;
  State.II = II;
}

void MachineIRBuilder::recordInsertion(MachineInstr *InsertedInstr) const {
  if (State.Observer)
    State.Observer->createdInstr(*InsertedInstr);
}

void MachineIRBuilder::setChangeObserver(GISelChangeObserver &Observer) {
  State.Observer = &Observer;
}

void MachineIRBuilder::stopObservingChanges() { State.Observer = nullptr; }
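
// A minimal setup sketch for the state API above (hypothetical caller: `MF`
// and `MBB` are assumed to be provided by whatever pass drives the builder;
// illustrative, not part of the builder itself):
//
//   MachineIRBuilder Builder;
//   Builder.setMF(MF);   // bind function-level state (MRI, TII, ...)
//   Builder.setMBB(MBB); // subsequent instructions append to MBB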

//------------------------------------------------------------------------------
// Build instruction variants.
//------------------------------------------------------------------------------

MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) {
  return insertInstr(buildInstrNoInsert(Opcode));
}

MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
  MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  recordInsertion(MIB);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildDirectDbgValue(unsigned Reg, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}

MachineInstrBuilder
MachineIRBuilder::buildIndirectDbgValue(unsigned Reg, const MDNode *Variable,
                                        const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}

MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
                                                      const MDNode *Variable,
                                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}
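
// A usage sketch for the DBG_VALUE builders above (hypothetical: `DI` is
// assumed to be a translated llvm.dbg.value intrinsic and `Reg` its lowered
// location):
//
//   Builder.setDebugLoc(DI.getDebugLoc());
//   Builder.buildDirectDbgValue(Reg, DI.getVariable(), DI.getExpression());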

MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
                                                         const MDNode *Variable,
                                                         const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert %noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(0U);
  }

  return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
  assert(isa<DILabel>(Label) && "not a label");
  assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
         "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_LABEL);

  return MIB.addMetadata(Label);
}

MachineInstrBuilder MachineIRBuilder::buildFrameIndex(unsigned Res, int Idx) {
  assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
  return buildInstr(TargetOpcode::G_FRAME_INDEX)
      .addDef(Res)
      .addFrameIndex(Idx);
}

MachineInstrBuilder MachineIRBuilder::buildGlobalValue(unsigned Res,
                                                       const GlobalValue *GV) {
  assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
  assert(getMRI()->getType(Res).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");

  return buildInstr(TargetOpcode::G_GLOBAL_VALUE)
      .addDef(Res)
      .addGlobalAddress(GV);
}

void MachineIRBuilder::validateBinaryOp(const LLT &Res, const LLT &Op0,
                                        const LLT &Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0 && Res == Op1) && "type mismatch");
}

MachineInstrBuilder MachineIRBuilder::buildGEP(unsigned Res, unsigned Op0,
                                               unsigned Op1) {
  assert(getMRI()->getType(Res).isPointer() &&
         getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");
  assert(getMRI()->getType(Op1).isScalar() && "invalid offset type");

  return buildInstr(TargetOpcode::G_GEP)
      .addDef(Res)
      .addUse(Op0)
      .addUse(Op1);
}

Optional<MachineInstrBuilder>
MachineIRBuilder::materializeGEP(unsigned &Res, unsigned Op0,
                                 const LLT &ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");

  if (Value == 0) {
    Res = Op0;
    return None;
  }

  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  unsigned TmpReg = getMRI()->createGenericVirtualRegister(ValueTy);

  buildConstant(TmpReg, Value);
  return buildGEP(Res, Op0, TmpReg);
}

MachineInstrBuilder MachineIRBuilder::buildPtrMask(unsigned Res, unsigned Op0,
                                                   uint32_t NumBits) {
  assert(getMRI()->getType(Res).isPointer() &&
         getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");

  return buildInstr(TargetOpcode::G_PTR_MASK)
      .addDef(Res)
      .addUse(Op0)
      .addImm(NumBits);
}

MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildBrIndirect(unsigned Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}

MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::COPY, Res, Op);
}
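
// materializeGEP folds a zero offset away entirely; a caller sketch
// (illustrative names, 64-bit offset type assumed):
//
//   unsigned Res = 0;
//   if (auto NewMI = Builder.materializeGEP(Res, Base, LLT::scalar(64), Off))
//     ...; // Res is defined by the new G_GEP in *NewMI
//   else
//     ...; // Off was 0, so Res simply aliases Base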

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const ConstantInt &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();

  const ConstantInt *NewVal = &Val;
  if (EltTy.getSizeInBits() != Val.getBitWidth()) {
    NewVal = ConstantInt::get(
        getMF().getFunction().getContext(),
        Val.getValue().sextOrTrunc(EltTy.getSizeInBits()));
  }

  if (Ty.isVector()) {
    unsigned EltReg = getMRI()->createGenericVirtualRegister(EltTy);
    buildInstr(TargetOpcode::G_CONSTANT)
        .addDef(EltReg)
        .addCImm(NewVal);

    auto MIB = buildInstr(TargetOpcode::G_BUILD_VECTOR);
    Res.addDefToMIB(*getMRI(), MIB);

    for (unsigned I = 0, E = Ty.getNumElements(); I != E; ++I)
      MIB.addUse(EltReg);
    return MIB;
  }

  auto MIB = buildInstr(TargetOpcode::G_CONSTANT);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addCImm(NewVal);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    int64_t Val) {
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
                               Res.getLLTTy(*getMRI()).getSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const ConstantFP &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());

  assert(!Ty.isPointer() && "invalid operand type");

  if (Ty.isVector()) {
    unsigned EltReg =
        getMRI()->createGenericVirtualRegister(Ty.getElementType());
    buildInstr(TargetOpcode::G_FCONSTANT)
        .addDef(EltReg)
        .addFPImm(&Val);

    auto MIB = buildInstr(TargetOpcode::G_BUILD_VECTOR);
    Res.addDefToMIB(*getMRI(), MIB);

    for (unsigned I = 0, E = Ty.getNumElements(); I != E; ++I)
      MIB.addUse(EltReg);
    return MIB;
  }

  auto MIB = buildInstr(TargetOpcode::G_FCONSTANT);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addFPImm(&Val);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     double Val) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP =
      ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getSizeInBits()));
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildBrCond(unsigned Tst,
                                                  MachineBasicBlock &Dest) {
  assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildLoad(unsigned Res, unsigned Addr,
                                                MachineMemOperand &MMO) {
  return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
}

MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
                                                     unsigned Res,
                                                     unsigned Addr,
                                                     MachineMemOperand &MMO) {
  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");

  return buildInstr(Opcode)
      .addDef(Res)
      .addUse(Addr)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildStore(unsigned Val, unsigned Addr,
                                                 MachineMemOperand &MMO) {
  assert(getMRI()->getType(Val).isValid() && "invalid operand type");
  assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");

  return buildInstr(TargetOpcode::G_STORE)
      .addUse(Val)
      .addUse(Addr)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildUAdde(const DstOp &Res,
                                                 const DstOp &CarryOut,
                                                 const SrcOp &Op0,
                                                 const SrcOp &Op1,
                                                 const SrcOp &CarryIn) {
  return buildInstr(TargetOpcode::G_UADDE, {Res, CarryOut},
                    {Op0, Op1, CarryIn});
}
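
// For a vector destination, buildConstant emits one scalar G_CONSTANT and
// splats it with G_BUILD_VECTOR; a sketch of the resulting MIR (illustrative
// virtual register numbers):
//
//   auto C = Builder.buildConstant(LLT::vector(4, 32), -1);
//   // %0:_(s32) = G_CONSTANT i32 -1
//   // %1:_(<4 x s32>) = G_BUILD_VECTOR %0, %0, %0, %0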

MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
                                                  const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
                                                      const DstOp &Res,
                                                      const SrcOp &Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(Res.getLLTTy(*getMRI()).isScalar() ||
         Res.getLLTTy(*getMRI()).isVector());
  assert(Res.getLLTTy(*getMRI()).isScalar() ==
         Op.getLLTTy(*getMRI()).isScalar());

  unsigned Opcode = TargetOpcode::COPY;
  if (Res.getLLTTy(*getMRI()).getSizeInBits() >
      Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = ExtOpc;
  else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
           Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = TargetOpcode::G_TRUNC;
  else
    assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));

  return buildInstr(Opcode, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
                                                         const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
                                                const SrcOp &Src) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());
  if (SrcTy == DstTy)
    return buildCopy(Dst, Src);

  unsigned Opcode;
  if (SrcTy.isPointer() && DstTy.isScalar())
    Opcode = TargetOpcode::G_PTRTOINT;
  else if (DstTy.isPointer() && SrcTy.isScalar())
    Opcode = TargetOpcode::G_INTTOPTR;
  else {
    assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
    Opcode = TargetOpcode::G_BITCAST;
  }

  return buildInstr(Opcode, Dst, Src);
}

MachineInstrBuilder MachineIRBuilder::buildExtract(unsigned Res, unsigned Src,
                                                   uint64_t Index) {
#ifndef NDEBUG
  assert(getMRI()->getType(Src).isValid() && "invalid operand type");
  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  assert(Index + getMRI()->getType(Res).getSizeInBits() <=
             getMRI()->getType(Src).getSizeInBits() &&
         "extracting off end of register");
#endif

  if (getMRI()->getType(Res).getSizeInBits() ==
      getMRI()->getType(Src).getSizeInBits()) {
    assert(Index == 0 && "extraction past the end of a register");
    return buildCast(Res, Src);
  }

  return buildInstr(TargetOpcode::G_EXTRACT)
      .addDef(Res)
      .addUse(Src)
      .addImm(Index);
}
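
// buildExtOrTrunc picks the opcode from the relative type sizes, so the
// *OrTrunc helpers are safe to call without knowing which direction the
// conversion goes (illustrative scalar types):
//
//   Builder.buildSExtOrTrunc(LLT::scalar(64), Src32); // s32 -> s64: G_SEXT
//   Builder.buildSExtOrTrunc(LLT::scalar(16), Src32); // s32 -> s16: G_TRUNC
//   Builder.buildSExtOrTrunc(LLT::scalar(32), Src32); // same size:  COPY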

void MachineIRBuilder::buildSequence(unsigned Res, ArrayRef<unsigned> Ops,
                                     ArrayRef<uint64_t> Indices) {
#ifndef NDEBUG
  assert(Ops.size() == Indices.size() && "incompatible args");
  assert(!Ops.empty() && "invalid trivial sequence");
  assert(std::is_sorted(Indices.begin(), Indices.end()) &&
         "sequence offsets must be in ascending order");

  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  for (auto Op : Ops)
    assert(getMRI()->getType(Op).isValid() && "invalid operand type");
#endif

  LLT ResTy = getMRI()->getType(Res);
  LLT OpTy = getMRI()->getType(Ops[0]);
  unsigned OpSize = OpTy.getSizeInBits();
  bool MaybeMerge = true;
  for (unsigned i = 0; i < Ops.size(); ++i) {
    if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
      MaybeMerge = false;
      break;
    }
  }

  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
    buildMerge(Res, Ops);
    return;
  }

  unsigned ResIn = getMRI()->createGenericVirtualRegister(ResTy);
  buildUndef(ResIn);

  for (unsigned i = 0; i < Ops.size(); ++i) {
    unsigned ResOut = i + 1 == Ops.size()
                          ? Res
                          : getMRI()->createGenericVirtualRegister(ResTy);
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
    ResIn = ResOut;
  }
}

MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
}

MachineInstrBuilder MachineIRBuilder::buildMerge(const DstOp &Res,
                                                 ArrayRef<unsigned> Ops) {
  // Unfortunately to convert from ArrayRef<unsigned> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<unsigned> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<unsigned> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
                                                       ArrayRef<unsigned> Ops) {
  // Unfortunately to convert from ArrayRef<unsigned> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}
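
// Typical merge/unmerge pairing (illustrative): split an s64 into two s32
// halves, then reassemble them.
//
//   auto Parts = Builder.buildUnmerge({LLT::scalar(32), LLT::scalar(32)},
//                                     Src64);
//   unsigned Lo = Parts->getOperand(0).getReg();
//   unsigned Hi = Parts->getOperand(1).getReg();
//   Builder.buildMerge(Dst64, {Lo, Hi});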

MachineInstrBuilder
MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
                                        ArrayRef<unsigned> Ops) {
  // Unfortunately to convert from ArrayRef<unsigned> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<unsigned> Ops) {
  // Unfortunately to convert from ArrayRef<unsigned> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildInsert(unsigned Res, unsigned Src,
                                                  unsigned Op, unsigned Index) {
  assert(Index + getMRI()->getType(Op).getSizeInBits() <=
             getMRI()->getType(Res).getSizeInBits() &&
         "insertion past the end of a register");

  if (getMRI()->getType(Res).getSizeInBits() ==
      getMRI()->getType(Op).getSizeInBits()) {
    return buildCast(Res, Op);
  }

  return buildInstr(TargetOpcode::G_INSERT)
      .addDef(Res)
      .addUse(Src)
      .addUse(Op)
      .addImm(Index);
}

MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     unsigned Res,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  if (Res)
    MIB.addDef(Res);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
                                                 const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildFPTrunc(const DstOp &Res,
                                                   const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
}

MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1});
}

MachineInstrBuilder MachineIRBuilder::buildSelect(const DstOp &Res,
                                                  const SrcOp &Tst,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1});
}

MachineInstrBuilder
MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
                                           const SrcOp &Elt, const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
}

MachineInstrBuilder
MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
                                            const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
}
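
// Comparison sketch (illustrative registers and blocks): an s1 result fed
// straight into a conditional branch.
//
//   auto Cmp = Builder.buildICmp(CmpInst::ICMP_EQ, LLT::scalar(1), LHS, RHS);
//   Builder.buildBrCond(Cmp->getOperand(0).getReg(), *TrueBB);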
type"); 646 assert(CmpValTy.isValid() && "invalid operand type"); 647 assert(NewValTy.isValid() && "invalid operand type"); 648 assert(OldValResTy == CmpValTy && "type mismatch"); 649 assert(OldValResTy == NewValTy && "type mismatch"); 650 #endif 651 652 return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS) 653 .addDef(OldValRes) 654 .addDef(SuccessRes) 655 .addUse(Addr) 656 .addUse(CmpVal) 657 .addUse(NewVal) 658 .addMemOperand(&MMO); 659 } 660 661 MachineInstrBuilder 662 MachineIRBuilder::buildAtomicCmpXchg(unsigned OldValRes, unsigned Addr, 663 unsigned CmpVal, unsigned NewVal, 664 MachineMemOperand &MMO) { 665 #ifndef NDEBUG 666 LLT OldValResTy = getMRI()->getType(OldValRes); 667 LLT AddrTy = getMRI()->getType(Addr); 668 LLT CmpValTy = getMRI()->getType(CmpVal); 669 LLT NewValTy = getMRI()->getType(NewVal); 670 assert(OldValResTy.isScalar() && "invalid operand type"); 671 assert(AddrTy.isPointer() && "invalid operand type"); 672 assert(CmpValTy.isValid() && "invalid operand type"); 673 assert(NewValTy.isValid() && "invalid operand type"); 674 assert(OldValResTy == CmpValTy && "type mismatch"); 675 assert(OldValResTy == NewValTy && "type mismatch"); 676 #endif 677 678 return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG) 679 .addDef(OldValRes) 680 .addUse(Addr) 681 .addUse(CmpVal) 682 .addUse(NewVal) 683 .addMemOperand(&MMO); 684 } 685 686 MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(unsigned Opcode, 687 unsigned OldValRes, 688 unsigned Addr, 689 unsigned Val, 690 MachineMemOperand &MMO) { 691 #ifndef NDEBUG 692 LLT OldValResTy = getMRI()->getType(OldValRes); 693 LLT AddrTy = getMRI()->getType(Addr); 694 LLT ValTy = getMRI()->getType(Val); 695 assert(OldValResTy.isScalar() && "invalid operand type"); 696 assert(AddrTy.isPointer() && "invalid operand type"); 697 assert(ValTy.isValid() && "invalid operand type"); 698 assert(OldValResTy == ValTy && "type mismatch"); 699 #endif 700 701 return buildInstr(Opcode) 702 .addDef(OldValRes) 703 .addUse(Addr) 704 .addUse(Val) 705 .addMemOperand(&MMO); 706 } 707 708 MachineInstrBuilder 709 MachineIRBuilder::buildAtomicRMWXchg(unsigned OldValRes, unsigned Addr, 710 unsigned Val, MachineMemOperand &MMO) { 711 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val, 712 MMO); 713 } 714 MachineInstrBuilder 715 MachineIRBuilder::buildAtomicRMWAdd(unsigned OldValRes, unsigned Addr, 716 unsigned Val, MachineMemOperand &MMO) { 717 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val, 718 MMO); 719 } 720 MachineInstrBuilder 721 MachineIRBuilder::buildAtomicRMWSub(unsigned OldValRes, unsigned Addr, 722 unsigned Val, MachineMemOperand &MMO) { 723 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val, 724 MMO); 725 } 726 MachineInstrBuilder 727 MachineIRBuilder::buildAtomicRMWAnd(unsigned OldValRes, unsigned Addr, 728 unsigned Val, MachineMemOperand &MMO) { 729 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val, 730 MMO); 731 } 732 MachineInstrBuilder 733 MachineIRBuilder::buildAtomicRMWNand(unsigned OldValRes, unsigned Addr, 734 unsigned Val, MachineMemOperand &MMO) { 735 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val, 736 MMO); 737 } 738 MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(unsigned OldValRes, 739 unsigned Addr, 740 unsigned Val, 741 MachineMemOperand &MMO) { 742 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val, 743 MMO); 744 } 745 MachineInstrBuilder 746 
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXchg(unsigned OldValRes, unsigned Addr,
                                     unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAdd(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWSub(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAnd(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWNand(unsigned OldValRes, unsigned Addr,
                                     unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(unsigned OldValRes,
                                                       unsigned Addr,
                                                       unsigned Val,
                                                       MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXor(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMax(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMin(unsigned OldValRes, unsigned Addr,
                                    unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmax(unsigned OldValRes, unsigned Addr,
                                     unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmin(unsigned OldValRes, unsigned Addr,
                                     unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildBlockAddress(unsigned Res, const BlockAddress *BA) {
#ifndef NDEBUG
  assert(getMRI()->getType(Res).isPointer() && "invalid res type");
#endif

  return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
}

void MachineIRBuilder::validateTruncExt(const LLT &DstTy, const LLT &SrcTy,
                                        bool IsExtend) {
#ifndef NDEBUG
  if (DstTy.isVector()) {
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");

  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}

void MachineIRBuilder::validateSelectOp(const LLT &ResTy, const LLT &TstTy,
                                        const LLT &Op0Ty, const LLT &Op1Ty) {
#ifndef NDEBUG
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
  if (ResTy.isScalar() || ResTy.isPointer())
    assert(TstTy.isScalar() && "type mismatch");
  else
    assert((TstTy.isScalar() ||
            (TstTy.isVector() &&
             TstTy.getNumElements() == Op0Ty.getNumElements())) &&
           "type mismatch");
#endif
}
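
// The generic buildInstr below validates per-opcode invariants in debug
// builds and may rewrite the request on the fly: a single-source
// G_MERGE_VALUES becomes a cast, a vector G_MERGE_VALUES becomes
// G_CONCAT_VECTORS, and a non-truncating G_BUILD_VECTOR_TRUNC becomes a
// plain G_BUILD_VECTOR.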

MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opc,
                                                 ArrayRef<DstOp> DstOps,
                                                 ArrayRef<SrcOp> SrcOps,
                                                 Optional<unsigned> Flags) {
  switch (Opc) {
  default:
    break;
  case TargetOpcode::G_SELECT: {
    assert(DstOps.size() == 1 && "Invalid select");
    assert(SrcOps.size() == 3 && "Invalid select");
    validateSelectOp(
        DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
        SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_ADD:
  case TargetOpcode::G_AND:
  case TargetOpcode::G_ASHR:
  case TargetOpcode::G_LSHR:
  case TargetOpcode::G_MUL:
  case TargetOpcode::G_OR:
  case TargetOpcode::G_SHL:
  case TargetOpcode::G_SUB:
  case TargetOpcode::G_XOR:
  case TargetOpcode::G_UDIV:
  case TargetOpcode::G_SDIV:
  case TargetOpcode::G_UREM:
  case TargetOpcode::G_SREM: {
    // All these are binary ops.
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()),
                     SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SEXT:
  case TargetOpcode::G_ZEXT:
  case TargetOpcode::G_ANYEXT:
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), true);
    break;
  case TargetOpcode::G_TRUNC:
  case TargetOpcode::G_FPTRUNC:
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), false);
    break;
  case TargetOpcode::COPY:
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    assert(DstOps[0].getLLTTy(*getMRI()) == LLT() ||
           SrcOps[0].getLLTTy(*getMRI()) == LLT() ||
           DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI()));
    break;
  case TargetOpcode::G_FCMP:
  case TargetOpcode::G_ICMP: {
    assert(DstOps.size() == 1 && "Invalid Dst Operands");
    assert(SrcOps.size() == 3 && "Invalid Src Operands");
    // For F/ICMP, the first src operand is the predicate, followed by
    // the two comparands.
    assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
           "Expecting predicate");
    assert([&]() -> bool {
      CmpInst::Predicate Pred = SrcOps[0].getPredicate();
      return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
                                         : CmpInst::isFPPredicate(Pred);
    }() && "Invalid predicate");
    assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert([&]() -> bool {
      LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
      LLT DstTy = DstOps[0].getLLTTy(*getMRI());
      if (Op0Ty.isScalar() || Op0Ty.isPointer())
        return DstTy.isScalar();
      else
        return DstTy.isVector() &&
               DstTy.getNumElements() == Op0Ty.getNumElements();
    }() && "Type Mismatch");
    break;
  }
  case TargetOpcode::G_UNMERGE_VALUES: {
    assert(!DstOps.empty() && "Invalid trivial sequence");
    assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
    assert(std::all_of(DstOps.begin(), DstOps.end(),
                       [&, this](const DstOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                DstOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in output list");
    assert(DstOps.size() * DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    break;
  }
  case TargetOpcode::G_MERGE_VALUES: {
    assert(!SrcOps.empty() && "invalid trivial sequence");
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    if (SrcOps.size() == 1)
      return buildCast(DstOps[0], SrcOps[0]);
    if (DstOps[0].getLLTTy(*getMRI()).isVector())
      return buildInstr(TargetOpcode::G_CONCAT_VECTORS, DstOps, SrcOps);
    break;
  }
  case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid Dst size");
    assert(SrcOps.size() == 2 && "Invalid Src size");
    assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
            DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
           "Invalid operand type");
    assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
    assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
               DstOps[0].getLLTTy(*getMRI()) &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_INSERT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid dst size");
    assert(SrcOps.size() == 3 && "Invalid src size");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
               SrcOps[1].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
    assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
               SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input scalars do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    if (SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
        DstOps[0].getLLTTy(*getMRI()).getElementType().getSizeInBits())
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    break;
  }
  case TargetOpcode::G_CONCAT_VECTORS: {
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return (Op.getLLTTy(*getMRI()).isVector() &&
                                 Op.getLLTTy(*getMRI()) ==
                                     SrcOps[0].getLLTTy(*getMRI()));
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input vectors do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_UADDE: {
    assert(DstOps.size() == 2 && "Invalid no of dst operands");
    assert(SrcOps.size() == 3 && "Invalid no of src operands");
    assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
           (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
           "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "type mismatch");
    break;
  }
  }

  auto MIB = buildInstr(Opc);
  for (const DstOp &Op : DstOps)
    Op.addDefToMIB(*getMRI(), MIB);
  for (const SrcOp &Op : SrcOps)
    Op.addSrcToMIB(MIB);
  if (Flags)
    MIB->setFlags(*Flags);
  return MIB;
}
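
// A usage sketch of this generic entry point, which the typed helpers above
// all funnel into (illustrative registers; MachineInstr::FmNoNans is one of
// the MI flags that setFlags accepts):
//
//   Builder.buildInstr(TargetOpcode::G_FADD, {Dst}, {Op0, Op1},
//                      MachineInstr::FmNoNans);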