//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetOpcodes.h"
#include "llvm/Target/TargetSubtargetInfo.h"

using namespace llvm;

/// Bind the builder to a new function: cache the register and instruction
/// info from \p MF and reset all per-block/per-instruction state (debug
/// location, insertion point, insertion-observer callback).
void MachineIRBuilder::setMF(MachineFunction &MF) {
  this->MF = &MF;
  this->MBB = nullptr;
  this->MRI = &MF.getRegInfo();
  this->TII = MF.getSubtarget().getInstrInfo();
  this->DL = DebugLoc();
  this->II = MachineBasicBlock::iterator();
  this->InsertedInstr = nullptr;
}

/// Position the builder at the end of \p MBB. The block must belong to the
/// function previously installed with setMF().
void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) {
  this->MBB = &MBB;
  this->II = MBB.end();
  assert(&getMF() == MBB.getParent() &&
         "Basic block is in a different function");
}

/// Position the builder so new instructions are inserted immediately
/// before \p MI (in MI's parent block).
void MachineIRBuilder::setInstr(MachineInstr &MI) {
  assert(MI.getParent() && "Instruction is not part of a basic block");
  // setMBB puts II at the block end; override it with MI's own position.
  setMBB(*MI.getParent());
  this->II = MI.getIterator();
}

/// Set an explicit insertion point (block + iterator) within the current
/// function.
void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator II) {
  assert(MBB.getParent() == &getMF() &&
         "Basic block is in a different function");
  this->MBB = &MBB;
  this->II = II;
}

/// Install a callback invoked for every instruction this builder inserts,
/// until stopRecordingInsertions() is called.
void MachineIRBuilder::recordInsertions(
    std::function<void(MachineInstr *)> Inserted) {
  InsertedInstr = std::move(Inserted);
}
61 void MachineIRBuilder::stopRecordingInsertions() { 62 InsertedInstr = nullptr; 63 } 64 65 //------------------------------------------------------------------------------ 66 // Build instruction variants. 67 //------------------------------------------------------------------------------ 68 69 MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) { 70 return insertInstr(buildInstrNoInsert(Opcode)); 71 } 72 73 MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) { 74 MachineInstrBuilder MIB = BuildMI(getMF(), DL, getTII().get(Opcode)); 75 return MIB; 76 } 77 78 79 MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) { 80 getMBB().insert(getInsertPt(), MIB); 81 if (InsertedInstr) 82 InsertedInstr(MIB); 83 return MIB; 84 } 85 86 MachineInstrBuilder MachineIRBuilder::buildDirectDbgValue( 87 unsigned Reg, const MDNode *Variable, const MDNode *Expr) { 88 assert(isa<DILocalVariable>(Variable) && "not a variable"); 89 assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); 90 assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && 91 "Expected inlined-at fields to agree"); 92 return buildInstr(TargetOpcode::DBG_VALUE) 93 .addReg(Reg, RegState::Debug) 94 .addReg(0, RegState::Debug) 95 .addMetadata(Variable) 96 .addMetadata(Expr); 97 } 98 99 MachineInstrBuilder MachineIRBuilder::buildIndirectDbgValue( 100 unsigned Reg, unsigned Offset, const MDNode *Variable, const MDNode *Expr) { 101 assert(isa<DILocalVariable>(Variable) && "not a variable"); 102 assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); 103 assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && 104 "Expected inlined-at fields to agree"); 105 return buildInstr(TargetOpcode::DBG_VALUE) 106 .addReg(Reg, RegState::Debug) 107 .addImm(Offset) 108 .addMetadata(Variable) 109 .addMetadata(Expr); 110 } 111 112 MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI, 113 const MDNode *Variable, 
114 const MDNode *Expr) { 115 assert(isa<DILocalVariable>(Variable) && "not a variable"); 116 assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); 117 assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && 118 "Expected inlined-at fields to agree"); 119 return buildInstr(TargetOpcode::DBG_VALUE) 120 .addFrameIndex(FI) 121 .addImm(0) 122 .addMetadata(Variable) 123 .addMetadata(Expr); 124 } 125 126 MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C, 127 unsigned Offset, 128 const MDNode *Variable, 129 const MDNode *Expr) { 130 assert(isa<DILocalVariable>(Variable) && "not a variable"); 131 assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); 132 assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && 133 "Expected inlined-at fields to agree"); 134 auto MIB = buildInstr(TargetOpcode::DBG_VALUE); 135 if (auto *CI = dyn_cast<ConstantInt>(&C)) { 136 if (CI->getBitWidth() > 64) 137 MIB.addCImm(CI); 138 else 139 MIB.addImm(CI->getZExtValue()); 140 } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) { 141 MIB.addFPImm(CFP); 142 } else { 143 // Insert %noreg if we didn't find a usable constant and had to drop it. 
144 MIB.addReg(0U); 145 } 146 147 return MIB.addImm(Offset).addMetadata(Variable).addMetadata(Expr); 148 } 149 150 MachineInstrBuilder MachineIRBuilder::buildFrameIndex(unsigned Res, int Idx) { 151 assert(MRI->getType(Res).isPointer() && "invalid operand type"); 152 return buildInstr(TargetOpcode::G_FRAME_INDEX) 153 .addDef(Res) 154 .addFrameIndex(Idx); 155 } 156 157 MachineInstrBuilder MachineIRBuilder::buildGlobalValue(unsigned Res, 158 const GlobalValue *GV) { 159 assert(MRI->getType(Res).isPointer() && "invalid operand type"); 160 assert(MRI->getType(Res).getAddressSpace() == 161 GV->getType()->getAddressSpace() && 162 "address space mismatch"); 163 164 return buildInstr(TargetOpcode::G_GLOBAL_VALUE) 165 .addDef(Res) 166 .addGlobalAddress(GV); 167 } 168 169 MachineInstrBuilder MachineIRBuilder::buildAdd(unsigned Res, unsigned Op0, 170 unsigned Op1) { 171 assert((MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()) && 172 "invalid operand type"); 173 assert(MRI->getType(Res) == MRI->getType(Op0) && 174 MRI->getType(Res) == MRI->getType(Op1) && "type mismatch"); 175 176 return buildInstr(TargetOpcode::G_ADD) 177 .addDef(Res) 178 .addUse(Op0) 179 .addUse(Op1); 180 } 181 182 MachineInstrBuilder MachineIRBuilder::buildGEP(unsigned Res, unsigned Op0, 183 unsigned Op1) { 184 assert(MRI->getType(Res).isPointer() && 185 MRI->getType(Res) == MRI->getType(Op0) && "type mismatch"); 186 assert(MRI->getType(Op1).isScalar() && "invalid offset type"); 187 188 return buildInstr(TargetOpcode::G_GEP) 189 .addDef(Res) 190 .addUse(Op0) 191 .addUse(Op1); 192 } 193 194 Optional<MachineInstrBuilder> 195 MachineIRBuilder::materializeGEP(unsigned &Res, unsigned Op0, 196 const LLT &ValueTy, uint64_t Value) { 197 assert(Res == 0 && "Res is a result argument"); 198 assert(ValueTy.isScalar() && "invalid offset type"); 199 200 if (Value == 0) { 201 Res = Op0; 202 return None; 203 } 204 205 Res = MRI->createGenericVirtualRegister(MRI->getType(Op0)); 206 unsigned TmpReg = 
MRI->createGenericVirtualRegister(ValueTy); 207 208 buildConstant(TmpReg, Value); 209 return buildGEP(Res, Op0, TmpReg); 210 } 211 212 MachineInstrBuilder MachineIRBuilder::buildPtrMask(unsigned Res, unsigned Op0, 213 uint32_t NumBits) { 214 assert(MRI->getType(Res).isPointer() && 215 MRI->getType(Res) == MRI->getType(Op0) && "type mismatch"); 216 217 return buildInstr(TargetOpcode::G_PTR_MASK) 218 .addDef(Res) 219 .addUse(Op0) 220 .addImm(NumBits); 221 } 222 223 MachineInstrBuilder MachineIRBuilder::buildSub(unsigned Res, unsigned Op0, 224 unsigned Op1) { 225 assert((MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()) && 226 "invalid operand type"); 227 assert(MRI->getType(Res) == MRI->getType(Op0) && 228 MRI->getType(Res) == MRI->getType(Op1) && "type mismatch"); 229 230 return buildInstr(TargetOpcode::G_SUB) 231 .addDef(Res) 232 .addUse(Op0) 233 .addUse(Op1); 234 } 235 236 MachineInstrBuilder MachineIRBuilder::buildMul(unsigned Res, unsigned Op0, 237 unsigned Op1) { 238 assert((MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()) && 239 "invalid operand type"); 240 assert(MRI->getType(Res) == MRI->getType(Op0) && 241 MRI->getType(Res) == MRI->getType(Op1) && "type mismatch"); 242 243 return buildInstr(TargetOpcode::G_MUL) 244 .addDef(Res) 245 .addUse(Op0) 246 .addUse(Op1); 247 } 248 249 MachineInstrBuilder MachineIRBuilder::buildAnd(unsigned Res, unsigned Op0, 250 unsigned Op1) { 251 assert((MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()) && 252 "invalid operand type"); 253 assert(MRI->getType(Res) == MRI->getType(Op0) && 254 MRI->getType(Res) == MRI->getType(Op1) && "type mismatch"); 255 256 return buildInstr(TargetOpcode::G_AND) 257 .addDef(Res) 258 .addUse(Op0) 259 .addUse(Op1); 260 } 261 262 MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) { 263 return buildInstr(TargetOpcode::G_BR).addMBB(&Dest); 264 } 265 266 MachineInstrBuilder MachineIRBuilder::buildBrIndirect(unsigned Tgt) { 267 
assert(MRI->getType(Tgt).isPointer() && "invalid branch destination"); 268 return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt); 269 } 270 271 MachineInstrBuilder MachineIRBuilder::buildCopy(unsigned Res, unsigned Op) { 272 assert(MRI->getType(Res) == LLT() || MRI->getType(Op) == LLT() || 273 MRI->getType(Res) == MRI->getType(Op)); 274 return buildInstr(TargetOpcode::COPY).addDef(Res).addUse(Op); 275 } 276 277 MachineInstrBuilder MachineIRBuilder::buildConstant(unsigned Res, 278 const ConstantInt &Val) { 279 LLT Ty = MRI->getType(Res); 280 281 assert((Ty.isScalar() || Ty.isPointer()) && "invalid operand type"); 282 283 const ConstantInt *NewVal = &Val; 284 if (Ty.getSizeInBits() != Val.getBitWidth()) 285 NewVal = ConstantInt::get(MF->getFunction()->getContext(), 286 Val.getValue().sextOrTrunc(Ty.getSizeInBits())); 287 288 return buildInstr(TargetOpcode::G_CONSTANT).addDef(Res).addCImm(NewVal); 289 } 290 291 MachineInstrBuilder MachineIRBuilder::buildConstant(unsigned Res, 292 int64_t Val) { 293 auto IntN = IntegerType::get(MF->getFunction()->getContext(), 294 MRI->getType(Res).getSizeInBits()); 295 ConstantInt *CI = ConstantInt::get(IntN, Val, true); 296 return buildConstant(Res, *CI); 297 } 298 299 MachineInstrBuilder MachineIRBuilder::buildFConstant(unsigned Res, 300 const ConstantFP &Val) { 301 assert(MRI->getType(Res).isScalar() && "invalid operand type"); 302 303 return buildInstr(TargetOpcode::G_FCONSTANT).addDef(Res).addFPImm(&Val); 304 } 305 306 MachineInstrBuilder MachineIRBuilder::buildBrCond(unsigned Tst, 307 MachineBasicBlock &Dest) { 308 assert(MRI->getType(Tst).isScalar() && "invalid operand type"); 309 310 return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest); 311 } 312 313 MachineInstrBuilder MachineIRBuilder::buildLoad(unsigned Res, unsigned Addr, 314 MachineMemOperand &MMO) { 315 assert(MRI->getType(Res).isValid() && "invalid operand type"); 316 assert(MRI->getType(Addr).isPointer() && "invalid operand type"); 317 318 return 
buildInstr(TargetOpcode::G_LOAD) 319 .addDef(Res) 320 .addUse(Addr) 321 .addMemOperand(&MMO); 322 } 323 324 MachineInstrBuilder MachineIRBuilder::buildStore(unsigned Val, unsigned Addr, 325 MachineMemOperand &MMO) { 326 assert(MRI->getType(Val).isValid() && "invalid operand type"); 327 assert(MRI->getType(Addr).isPointer() && "invalid operand type"); 328 329 return buildInstr(TargetOpcode::G_STORE) 330 .addUse(Val) 331 .addUse(Addr) 332 .addMemOperand(&MMO); 333 } 334 335 MachineInstrBuilder MachineIRBuilder::buildUAdde(unsigned Res, 336 unsigned CarryOut, 337 unsigned Op0, unsigned Op1, 338 unsigned CarryIn) { 339 assert(MRI->getType(Res).isScalar() && "invalid operand type"); 340 assert(MRI->getType(Res) == MRI->getType(Op0) && 341 MRI->getType(Res) == MRI->getType(Op1) && "type mismatch"); 342 assert(MRI->getType(CarryOut).isScalar() && "invalid operand type"); 343 assert(MRI->getType(CarryOut) == MRI->getType(CarryIn) && "type mismatch"); 344 345 return buildInstr(TargetOpcode::G_UADDE) 346 .addDef(Res) 347 .addDef(CarryOut) 348 .addUse(Op0) 349 .addUse(Op1) 350 .addUse(CarryIn); 351 } 352 353 MachineInstrBuilder MachineIRBuilder::buildAnyExt(unsigned Res, unsigned Op) { 354 validateTruncExt(Res, Op, true); 355 return buildInstr(TargetOpcode::G_ANYEXT).addDef(Res).addUse(Op); 356 } 357 358 MachineInstrBuilder MachineIRBuilder::buildSExt(unsigned Res, unsigned Op) { 359 validateTruncExt(Res, Op, true); 360 return buildInstr(TargetOpcode::G_SEXT).addDef(Res).addUse(Op); 361 } 362 363 MachineInstrBuilder MachineIRBuilder::buildZExt(unsigned Res, unsigned Op) { 364 validateTruncExt(Res, Op, true); 365 return buildInstr(TargetOpcode::G_ZEXT).addDef(Res).addUse(Op); 366 } 367 368 MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(unsigned Res, 369 unsigned Op) { 370 assert(MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()); 371 assert(MRI->getType(Res).isScalar() == MRI->getType(Op).isScalar()); 372 373 unsigned Opcode = TargetOpcode::COPY; 374 if 
(MRI->getType(Res).getSizeInBits() > MRI->getType(Op).getSizeInBits()) 375 Opcode = TargetOpcode::G_SEXT; 376 else if (MRI->getType(Res).getSizeInBits() < MRI->getType(Op).getSizeInBits()) 377 Opcode = TargetOpcode::G_TRUNC; 378 else 379 assert(MRI->getType(Res) == MRI->getType(Op)); 380 381 return buildInstr(Opcode).addDef(Res).addUse(Op); 382 } 383 384 MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(unsigned Res, 385 unsigned Op) { 386 assert(MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()); 387 assert(MRI->getType(Res).isScalar() == MRI->getType(Op).isScalar()); 388 389 unsigned Opcode = TargetOpcode::COPY; 390 if (MRI->getType(Res).getSizeInBits() > MRI->getType(Op).getSizeInBits()) 391 Opcode = TargetOpcode::G_ZEXT; 392 else if (MRI->getType(Res).getSizeInBits() < MRI->getType(Op).getSizeInBits()) 393 Opcode = TargetOpcode::G_TRUNC; 394 else 395 assert(MRI->getType(Res) == MRI->getType(Op)); 396 397 return buildInstr(Opcode).addDef(Res).addUse(Op); 398 } 399 400 MachineInstrBuilder MachineIRBuilder::buildCast(unsigned Dst, unsigned Src) { 401 LLT SrcTy = MRI->getType(Src); 402 LLT DstTy = MRI->getType(Dst); 403 if (SrcTy == DstTy) 404 return buildCopy(Dst, Src); 405 406 unsigned Opcode; 407 if (SrcTy.isPointer() && DstTy.isScalar()) 408 Opcode = TargetOpcode::G_PTRTOINT; 409 else if (DstTy.isPointer() && SrcTy.isScalar()) 410 Opcode = TargetOpcode::G_INTTOPTR; 411 else { 412 assert(!SrcTy.isPointer() && !DstTy.isPointer() && "n G_ADDRCAST yet"); 413 Opcode = TargetOpcode::G_BITCAST; 414 } 415 416 return buildInstr(Opcode).addDef(Dst).addUse(Src); 417 } 418 419 MachineInstrBuilder MachineIRBuilder::buildExtract(unsigned Res, unsigned Src, 420 uint64_t Index) { 421 #ifndef NDEBUG 422 assert(MRI->getType(Src).isValid() && "invalid operand type"); 423 assert(MRI->getType(Res).isValid() && "invalid operand type"); 424 assert(Index + MRI->getType(Res).getSizeInBits() <= 425 MRI->getType(Src).getSizeInBits() && 426 "extracting off end of 
register"); 427 #endif 428 429 if (MRI->getType(Res).getSizeInBits() == MRI->getType(Src).getSizeInBits()) { 430 assert(Index == 0 && "insertion past the end of a register"); 431 return buildCast(Res, Src); 432 } 433 434 return buildInstr(TargetOpcode::G_EXTRACT) 435 .addDef(Res) 436 .addUse(Src) 437 .addImm(Index); 438 } 439 440 void MachineIRBuilder::buildSequence(unsigned Res, ArrayRef<unsigned> Ops, 441 ArrayRef<uint64_t> Indices) { 442 #ifndef NDEBUG 443 assert(Ops.size() == Indices.size() && "incompatible args"); 444 assert(!Ops.empty() && "invalid trivial sequence"); 445 assert(std::is_sorted(Indices.begin(), Indices.end()) && 446 "sequence offsets must be in ascending order"); 447 448 assert(MRI->getType(Res).isValid() && "invalid operand type"); 449 for (auto Op : Ops) 450 assert(MRI->getType(Op).isValid() && "invalid operand type"); 451 #endif 452 453 LLT ResTy = MRI->getType(Res); 454 LLT OpTy = MRI->getType(Ops[0]); 455 unsigned OpSize = OpTy.getSizeInBits(); 456 bool MaybeMerge = true; 457 for (unsigned i = 0; i < Ops.size(); ++i) { 458 if (MRI->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) { 459 MaybeMerge = false; 460 break; 461 } 462 } 463 464 if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) { 465 buildMerge(Res, Ops); 466 return; 467 } 468 469 unsigned ResIn = MRI->createGenericVirtualRegister(ResTy); 470 buildUndef(ResIn); 471 472 for (unsigned i = 0; i < Ops.size(); ++i) { 473 unsigned ResOut = 474 i + 1 == Ops.size() ? 
Res : MRI->createGenericVirtualRegister(ResTy); 475 buildInsert(ResOut, ResIn, Ops[i], Indices[i]); 476 ResIn = ResOut; 477 } 478 } 479 480 MachineInstrBuilder MachineIRBuilder::buildUndef(unsigned Res) { 481 return buildInstr(TargetOpcode::IMPLICIT_DEF).addDef(Res); 482 } 483 484 MachineInstrBuilder MachineIRBuilder::buildMerge(unsigned Res, 485 ArrayRef<unsigned> Ops) { 486 487 #ifndef NDEBUG 488 assert(!Ops.empty() && "invalid trivial sequence"); 489 LLT Ty = MRI->getType(Ops[0]); 490 for (auto Reg : Ops) 491 assert(MRI->getType(Reg) == Ty && "type mismatch in input list"); 492 assert(Ops.size() * MRI->getType(Ops[0]).getSizeInBits() == 493 MRI->getType(Res).getSizeInBits() && 494 "input operands do not cover output register"); 495 #endif 496 497 if (Ops.size() == 1) 498 return buildCast(Res, Ops[0]); 499 500 MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_MERGE_VALUES); 501 MIB.addDef(Res); 502 for (unsigned i = 0; i < Ops.size(); ++i) 503 MIB.addUse(Ops[i]); 504 return MIB; 505 } 506 507 MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<unsigned> Res, 508 unsigned Op) { 509 510 #ifndef NDEBUG 511 assert(!Res.empty() && "invalid trivial sequence"); 512 LLT Ty = MRI->getType(Res[0]); 513 for (auto Reg : Res) 514 assert(MRI->getType(Reg) == Ty && "type mismatch in input list"); 515 assert(Res.size() * MRI->getType(Res[0]).getSizeInBits() == 516 MRI->getType(Op).getSizeInBits() && 517 "input operands do not cover output register"); 518 #endif 519 520 MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_UNMERGE_VALUES); 521 for (unsigned i = 0; i < Res.size(); ++i) 522 MIB.addDef(Res[i]); 523 MIB.addUse(Op); 524 return MIB; 525 } 526 527 MachineInstrBuilder MachineIRBuilder::buildInsert(unsigned Res, unsigned Src, 528 unsigned Op, unsigned Index) { 529 assert(Index + MRI->getType(Op).getSizeInBits() <= 530 MRI->getType(Res).getSizeInBits() && 531 "insertion past the end of a register"); 532 533 if (MRI->getType(Res).getSizeInBits() == 
MRI->getType(Op).getSizeInBits()) { 534 return buildCast(Res, Op); 535 } 536 537 return buildInstr(TargetOpcode::G_INSERT) 538 .addDef(Res) 539 .addUse(Src) 540 .addUse(Op) 541 .addImm(Index); 542 } 543 544 MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID, 545 unsigned Res, 546 bool HasSideEffects) { 547 auto MIB = 548 buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS 549 : TargetOpcode::G_INTRINSIC); 550 if (Res) 551 MIB.addDef(Res); 552 MIB.addIntrinsicID(ID); 553 return MIB; 554 } 555 556 MachineInstrBuilder MachineIRBuilder::buildTrunc(unsigned Res, unsigned Op) { 557 validateTruncExt(Res, Op, false); 558 return buildInstr(TargetOpcode::G_TRUNC).addDef(Res).addUse(Op); 559 } 560 561 MachineInstrBuilder MachineIRBuilder::buildFPTrunc(unsigned Res, unsigned Op) { 562 validateTruncExt(Res, Op, false); 563 return buildInstr(TargetOpcode::G_FPTRUNC).addDef(Res).addUse(Op); 564 } 565 566 MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred, 567 unsigned Res, unsigned Op0, 568 unsigned Op1) { 569 #ifndef NDEBUG 570 assert(MRI->getType(Op0) == MRI->getType(Op0) && "type mismatch"); 571 assert(CmpInst::isIntPredicate(Pred) && "invalid predicate"); 572 if (MRI->getType(Op0).isScalar() || MRI->getType(Op0).isPointer()) 573 assert(MRI->getType(Res).isScalar() && "type mismatch"); 574 else 575 assert(MRI->getType(Res).isVector() && 576 MRI->getType(Res).getNumElements() == 577 MRI->getType(Op0).getNumElements() && 578 "type mismatch"); 579 #endif 580 581 return buildInstr(TargetOpcode::G_ICMP) 582 .addDef(Res) 583 .addPredicate(Pred) 584 .addUse(Op0) 585 .addUse(Op1); 586 } 587 588 MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred, 589 unsigned Res, unsigned Op0, 590 unsigned Op1) { 591 #ifndef NDEBUG 592 assert((MRI->getType(Op0).isScalar() || MRI->getType(Op0).isVector()) && 593 "invalid operand type"); 594 assert(MRI->getType(Op0) == MRI->getType(Op1) && "type mismatch"); 595 
assert(CmpInst::isFPPredicate(Pred) && "invalid predicate"); 596 if (MRI->getType(Op0).isScalar()) 597 assert(MRI->getType(Res).isScalar() && "type mismatch"); 598 else 599 assert(MRI->getType(Res).isVector() && 600 MRI->getType(Res).getNumElements() == 601 MRI->getType(Op0).getNumElements() && 602 "type mismatch"); 603 #endif 604 605 return buildInstr(TargetOpcode::G_FCMP) 606 .addDef(Res) 607 .addPredicate(Pred) 608 .addUse(Op0) 609 .addUse(Op1); 610 } 611 612 MachineInstrBuilder MachineIRBuilder::buildSelect(unsigned Res, unsigned Tst, 613 unsigned Op0, unsigned Op1) { 614 #ifndef NDEBUG 615 LLT ResTy = MRI->getType(Res); 616 assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) && 617 "invalid operand type"); 618 assert(ResTy == MRI->getType(Op0) && ResTy == MRI->getType(Op1) && 619 "type mismatch"); 620 if (ResTy.isScalar() || ResTy.isPointer()) 621 assert(MRI->getType(Tst).isScalar() && "type mismatch"); 622 else 623 assert((MRI->getType(Tst).isScalar() || 624 (MRI->getType(Tst).isVector() && 625 MRI->getType(Tst).getNumElements() == 626 MRI->getType(Op0).getNumElements())) && 627 "type mismatch"); 628 #endif 629 630 return buildInstr(TargetOpcode::G_SELECT) 631 .addDef(Res) 632 .addUse(Tst) 633 .addUse(Op0) 634 .addUse(Op1); 635 } 636 637 MachineInstrBuilder MachineIRBuilder::buildInsertVectorElement(unsigned Res, 638 unsigned Val, 639 unsigned Elt, 640 unsigned Idx) { 641 #ifndef NDEBUG 642 LLT ResTy = MRI->getType(Res); 643 LLT ValTy = MRI->getType(Val); 644 LLT EltTy = MRI->getType(Elt); 645 LLT IdxTy = MRI->getType(Idx); 646 assert(ResTy.isVector() && ValTy.isVector() && "invalid operand type"); 647 assert(IdxTy.isScalar() && "invalid operand type"); 648 assert(ResTy.getNumElements() == ValTy.getNumElements() && "type mismatch"); 649 assert(ResTy.getElementType() == EltTy && "type mismatch"); 650 #endif 651 652 return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT) 653 .addDef(Res) 654 .addUse(Val) 655 .addUse(Elt) 656 .addUse(Idx); 657 } 658 
659 MachineInstrBuilder MachineIRBuilder::buildExtractVectorElement(unsigned Res, 660 unsigned Val, 661 unsigned Idx) { 662 #ifndef NDEBUG 663 LLT ResTy = MRI->getType(Res); 664 LLT ValTy = MRI->getType(Val); 665 LLT IdxTy = MRI->getType(Idx); 666 assert(ValTy.isVector() && "invalid operand type"); 667 assert((ResTy.isScalar() || ResTy.isPointer()) && "invalid operand type"); 668 assert(IdxTy.isScalar() && "invalid operand type"); 669 assert(ValTy.getElementType() == ResTy && "type mismatch"); 670 #endif 671 672 return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT) 673 .addDef(Res) 674 .addUse(Val) 675 .addUse(Idx); 676 } 677 678 void MachineIRBuilder::validateTruncExt(unsigned Dst, unsigned Src, 679 bool IsExtend) { 680 #ifndef NDEBUG 681 LLT SrcTy = MRI->getType(Src); 682 LLT DstTy = MRI->getType(Dst); 683 684 if (DstTy.isVector()) { 685 assert(SrcTy.isVector() && "mismatched cast between vecot and non-vector"); 686 assert(SrcTy.getNumElements() == DstTy.getNumElements() && 687 "different number of elements in a trunc/ext"); 688 } else 689 assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc"); 690 691 if (IsExtend) 692 assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() && 693 "invalid narrowing extend"); 694 else 695 assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() && 696 "invalid widening trunc"); 697 #endif 698 } 699