1 //===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==// 2 // 3 // The LLVM Compiler Infrastructure 4 // 5 // This file is distributed under the University of Illinois Open Source 6 // License. See LICENSE.TXT for details. 7 // 8 //===----------------------------------------------------------------------===// 9 /// \file 10 /// This file implements the MachineIRBuidler class. 11 //===----------------------------------------------------------------------===// 12 #include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h" 13 14 #include "llvm/CodeGen/MachineFunction.h" 15 #include "llvm/CodeGen/MachineInstr.h" 16 #include "llvm/CodeGen/MachineInstrBuilder.h" 17 #include "llvm/CodeGen/MachineRegisterInfo.h" 18 #include "llvm/IR/DebugInfo.h" 19 #include "llvm/Target/TargetInstrInfo.h" 20 #include "llvm/Target/TargetOpcodes.h" 21 #include "llvm/Target/TargetSubtargetInfo.h" 22 23 using namespace llvm; 24 25 void MachineIRBuilder::setMF(MachineFunction &MF) { 26 this->MF = &MF; 27 this->MBB = nullptr; 28 this->MRI = &MF.getRegInfo(); 29 this->TII = MF.getSubtarget().getInstrInfo(); 30 this->DL = DebugLoc(); 31 this->II = MachineBasicBlock::iterator(); 32 this->InsertedInstr = nullptr; 33 } 34 35 void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) { 36 this->MBB = &MBB; 37 this->II = MBB.end(); 38 assert(&getMF() == MBB.getParent() && 39 "Basic block is in a different function"); 40 } 41 42 void MachineIRBuilder::setInstr(MachineInstr &MI) { 43 assert(MI.getParent() && "Instruction is not part of a basic block"); 44 setMBB(*MI.getParent()); 45 this->II = MI.getIterator(); 46 } 47 48 void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB, 49 MachineBasicBlock::iterator II) { 50 assert(MBB.getParent() == &getMF() && 51 "Basic block is in a different function"); 52 this->MBB = &MBB; 53 this->II = II; 54 } 55 56 void MachineIRBuilder::recordInsertions( 57 std::function<void(MachineInstr *)> Inserted) { 58 InsertedInstr = std::move(Inserted); 59 } 60 61 void MachineIRBuilder::stopRecordingInsertions() { 62 InsertedInstr = nullptr; 63 } 64 65 //------------------------------------------------------------------------------ 66 // Build instruction variants. 67 //------------------------------------------------------------------------------ 68 69 MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) { 70 return insertInstr(buildInstrNoInsert(Opcode)); 71 } 72 73 MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) { 74 MachineInstrBuilder MIB = BuildMI(getMF(), DL, getTII().get(Opcode)); 75 return MIB; 76 } 77 78 79 MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) { 80 getMBB().insert(getInsertPt(), MIB); 81 if (InsertedInstr) 82 InsertedInstr(MIB); 83 return MIB; 84 } 85 86 MachineInstrBuilder MachineIRBuilder::buildDirectDbgValue( 87 unsigned Reg, const MDNode *Variable, const MDNode *Expr) { 88 assert(isa<DILocalVariable>(Variable) && "not a variable"); 89 assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); 90 assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && 91 "Expected inlined-at fields to agree"); 92 return buildInstr(TargetOpcode::DBG_VALUE) 93 .addReg(Reg, RegState::Debug) 94 .addReg(0, RegState::Debug) 95 .addMetadata(Variable) 96 .addMetadata(Expr); 97 } 98 99 MachineInstrBuilder MachineIRBuilder::buildIndirectDbgValue( 100 unsigned Reg, unsigned Offset, const MDNode *Variable, const MDNode *Expr) { 101 assert(isa<DILocalVariable>(Variable) && "not a variable"); 102 assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); 103 assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && 104 "Expected inlined-at fields to agree"); 105 return buildInstr(TargetOpcode::DBG_VALUE) 106 .addReg(Reg, RegState::Debug) 107 .addImm(Offset) 108 .addMetadata(Variable) 109 .addMetadata(Expr); 110 } 111 112 MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI, 113 const MDNode *Variable, 114 const MDNode *Expr) { 115 assert(isa<DILocalVariable>(Variable) && "not a variable"); 116 assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); 117 assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && 118 "Expected inlined-at fields to agree"); 119 return buildInstr(TargetOpcode::DBG_VALUE) 120 .addFrameIndex(FI) 121 .addImm(0) 122 .addMetadata(Variable) 123 .addMetadata(Expr); 124 } 125 126 MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C, 127 unsigned Offset, 128 const MDNode *Variable, 129 const MDNode *Expr) { 130 assert(isa<DILocalVariable>(Variable) && "not a variable"); 131 assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); 132 assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && 133 "Expected inlined-at fields to agree"); 134 auto MIB = buildInstr(TargetOpcode::DBG_VALUE); 135 if (auto *CI = dyn_cast<ConstantInt>(&C)) { 136 if (CI->getBitWidth() > 64) 137 MIB.addCImm(CI); 138 else 139 MIB.addImm(CI->getZExtValue()); 140 } else 141 MIB.addFPImm(&cast<ConstantFP>(C)); 142 143 return MIB.addImm(Offset).addMetadata(Variable).addMetadata(Expr); 144 } 145 146 MachineInstrBuilder MachineIRBuilder::buildFrameIndex(unsigned Res, int Idx) { 147 assert(MRI->getType(Res).isPointer() && "invalid operand type"); 148 return buildInstr(TargetOpcode::G_FRAME_INDEX) 149 .addDef(Res) 150 .addFrameIndex(Idx); 151 } 152 153 MachineInstrBuilder MachineIRBuilder::buildGlobalValue(unsigned Res, 154 const GlobalValue *GV) { 155 assert(MRI->getType(Res).isPointer() && "invalid operand type"); 156 assert(MRI->getType(Res).getAddressSpace() == 157 GV->getType()->getAddressSpace() && 158 "address space mismatch"); 159 160 return buildInstr(TargetOpcode::G_GLOBAL_VALUE) 161 .addDef(Res) 162 .addGlobalAddress(GV); 163 } 164 165 MachineInstrBuilder MachineIRBuilder::buildAdd(unsigned Res, unsigned Op0, 166 unsigned Op1) { 167 assert((MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()) && 168 "invalid operand type"); 169 assert(MRI->getType(Res) == MRI->getType(Op0) && 170 MRI->getType(Res) == MRI->getType(Op1) && "type mismatch"); 171 172 return buildInstr(TargetOpcode::G_ADD) 173 .addDef(Res) 174 .addUse(Op0) 175 .addUse(Op1); 176 } 177 178 MachineInstrBuilder MachineIRBuilder::buildGEP(unsigned Res, unsigned Op0, 179 unsigned Op1) { 180 assert(MRI->getType(Res).isPointer() && 181 MRI->getType(Res) == MRI->getType(Op0) && "type mismatch"); 182 assert(MRI->getType(Op1).isScalar() && "invalid offset type"); 183 184 return buildInstr(TargetOpcode::G_GEP) 185 .addDef(Res) 186 .addUse(Op0) 187 .addUse(Op1); 188 } 189 190 MachineInstrBuilder MachineIRBuilder::buildSub(unsigned Res, unsigned Op0, 191 unsigned Op1) { 192 assert((MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()) && 193 "invalid operand type"); 194 assert(MRI->getType(Res) == MRI->getType(Op0) && 195 MRI->getType(Res) == MRI->getType(Op1) && "type mismatch"); 196 197 return buildInstr(TargetOpcode::G_SUB) 198 .addDef(Res) 199 .addUse(Op0) 200 .addUse(Op1); 201 } 202 203 MachineInstrBuilder MachineIRBuilder::buildMul(unsigned Res, unsigned Op0, 204 unsigned Op1) { 205 assert((MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()) && 206 "invalid operand type"); 207 assert(MRI->getType(Res) == MRI->getType(Op0) && 208 MRI->getType(Res) == MRI->getType(Op1) && "type mismatch"); 209 210 return buildInstr(TargetOpcode::G_MUL) 211 .addDef(Res) 212 .addUse(Op0) 213 .addUse(Op1); 214 } 215 216 MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) { 217 return buildInstr(TargetOpcode::G_BR).addMBB(&Dest); 218 } 219 220 MachineInstrBuilder MachineIRBuilder::buildBrIndirect(unsigned Tgt) { 221 return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt); 222 } 223 224 MachineInstrBuilder MachineIRBuilder::buildCopy(unsigned Res, unsigned Op) { 225 return buildInstr(TargetOpcode::COPY).addDef(Res).addUse(Op); 226 } 227 228 MachineInstrBuilder MachineIRBuilder::buildConstant(unsigned Res, 229 const ConstantInt &Val) { 230 LLT Ty = MRI->getType(Res); 231 232 assert((Ty.isScalar() || Ty.isPointer()) && "invalid operand type"); 233 234 const ConstantInt *NewVal = &Val; 235 if (Ty.getSizeInBits() != Val.getBitWidth()) 236 NewVal = ConstantInt::get(MF->getFunction()->getContext(), 237 Val.getValue().sextOrTrunc(Ty.getSizeInBits())); 238 239 return buildInstr(TargetOpcode::G_CONSTANT).addDef(Res).addCImm(NewVal); 240 } 241 242 MachineInstrBuilder MachineIRBuilder::buildConstant(unsigned Res, 243 int64_t Val) { 244 auto IntN = IntegerType::get(MF->getFunction()->getContext(), 245 MRI->getType(Res).getSizeInBits()); 246 ConstantInt *CI = ConstantInt::get(IntN, Val, true); 247 return buildConstant(Res, *CI); 248 } 249 250 MachineInstrBuilder MachineIRBuilder::buildFConstant(unsigned Res, 251 const ConstantFP &Val) { 252 assert(MRI->getType(Res).isScalar() && "invalid operand type"); 253 254 return buildInstr(TargetOpcode::G_FCONSTANT).addDef(Res).addFPImm(&Val); 255 } 256 257 MachineInstrBuilder MachineIRBuilder::buildBrCond(unsigned Tst, 258 MachineBasicBlock &Dest) { 259 assert(MRI->getType(Tst).isScalar() && "invalid operand type"); 260 261 return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest); 262 } 263 264 MachineInstrBuilder MachineIRBuilder::buildLoad(unsigned Res, unsigned Addr, 265 MachineMemOperand &MMO) { 266 assert(MRI->getType(Res).isValid() && "invalid operand type"); 267 assert(MRI->getType(Addr).isPointer() && "invalid operand type"); 268 269 return buildInstr(TargetOpcode::G_LOAD) 270 .addDef(Res) 271 .addUse(Addr) 272 .addMemOperand(&MMO); 273 } 274 275 MachineInstrBuilder MachineIRBuilder::buildStore(unsigned Val, unsigned Addr, 276 MachineMemOperand &MMO) { 277 assert(MRI->getType(Val).isValid() && "invalid operand type"); 278 assert(MRI->getType(Addr).isPointer() && "invalid operand type"); 279 280 return buildInstr(TargetOpcode::G_STORE) 281 .addUse(Val) 282 .addUse(Addr) 283 .addMemOperand(&MMO); 284 } 285 286 MachineInstrBuilder MachineIRBuilder::buildUAdde(unsigned Res, 287 unsigned CarryOut, 288 unsigned Op0, unsigned Op1, 289 unsigned CarryIn) { 290 assert(MRI->getType(Res).isScalar() && "invalid operand type"); 291 assert(MRI->getType(Res) == MRI->getType(Op0) && 292 MRI->getType(Res) == MRI->getType(Op1) && "type mismatch"); 293 assert(MRI->getType(CarryOut).isScalar() && "invalid operand type"); 294 assert(MRI->getType(CarryOut) == MRI->getType(CarryIn) && "type mismatch"); 295 296 return buildInstr(TargetOpcode::G_UADDE) 297 .addDef(Res) 298 .addDef(CarryOut) 299 .addUse(Op0) 300 .addUse(Op1) 301 .addUse(CarryIn); 302 } 303 304 MachineInstrBuilder MachineIRBuilder::buildAnyExt(unsigned Res, unsigned Op) { 305 validateTruncExt(Res, Op, true); 306 return buildInstr(TargetOpcode::G_ANYEXT).addDef(Res).addUse(Op); 307 } 308 309 MachineInstrBuilder MachineIRBuilder::buildSExt(unsigned Res, unsigned Op) { 310 validateTruncExt(Res, Op, true); 311 return buildInstr(TargetOpcode::G_SEXT).addDef(Res).addUse(Op); 312 } 313 314 MachineInstrBuilder MachineIRBuilder::buildZExt(unsigned Res, unsigned Op) { 315 validateTruncExt(Res, Op, true); 316 return buildInstr(TargetOpcode::G_ZEXT).addDef(Res).addUse(Op); 317 } 318 319 MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(unsigned Res, 320 unsigned Op) { 321 unsigned Opcode = TargetOpcode::COPY; 322 if (MRI->getType(Res).getSizeInBits() > MRI->getType(Op).getSizeInBits()) 323 Opcode = TargetOpcode::G_SEXT; 324 else if (MRI->getType(Res).getSizeInBits() < MRI->getType(Op).getSizeInBits()) 325 Opcode = TargetOpcode::G_TRUNC; 326 327 return buildInstr(Opcode).addDef(Res).addUse(Op); 328 } 329 330 MachineInstrBuilder MachineIRBuilder::buildExtract(ArrayRef<unsigned> Results, 331 ArrayRef<uint64_t> Indices, 332 unsigned Src) { 333 #ifndef NDEBUG 334 assert(Results.size() == Indices.size() && "inconsistent number of regs"); 335 assert(!Results.empty() && "invalid trivial extract"); 336 assert(std::is_sorted(Indices.begin(), Indices.end()) && 337 "extract offsets must be in ascending order"); 338 339 assert(MRI->getType(Src).isValid() && "invalid operand type"); 340 for (auto Res : Results) 341 assert(MRI->getType(Res).isValid() && "invalid operand type"); 342 #endif 343 344 auto MIB = BuildMI(getMF(), DL, getTII().get(TargetOpcode::G_EXTRACT)); 345 for (auto Res : Results) 346 MIB.addDef(Res); 347 348 MIB.addUse(Src); 349 350 for (auto Idx : Indices) 351 MIB.addImm(Idx); 352 353 getMBB().insert(getInsertPt(), MIB); 354 if (InsertedInstr) 355 InsertedInstr(MIB); 356 357 return MIB; 358 } 359 360 MachineInstrBuilder 361 MachineIRBuilder::buildSequence(unsigned Res, 362 ArrayRef<unsigned> Ops, 363 ArrayRef<uint64_t> Indices) { 364 #ifndef NDEBUG 365 assert(Ops.size() == Indices.size() && "incompatible args"); 366 assert(!Ops.empty() && "invalid trivial sequence"); 367 assert(std::is_sorted(Indices.begin(), Indices.end()) && 368 "sequence offsets must be in ascending order"); 369 370 assert(MRI->getType(Res).isValid() && "invalid operand type"); 371 for (auto Op : Ops) 372 assert(MRI->getType(Op).isValid() && "invalid operand type"); 373 #endif 374 375 MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_SEQUENCE); 376 MIB.addDef(Res); 377 for (unsigned i = 0; i < Ops.size(); ++i) { 378 MIB.addUse(Ops[i]); 379 MIB.addImm(Indices[i]); 380 } 381 return MIB; 382 } 383 384 MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID, 385 unsigned Res, 386 bool HasSideEffects) { 387 auto MIB = 388 buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS 389 : TargetOpcode::G_INTRINSIC); 390 if (Res) 391 MIB.addDef(Res); 392 MIB.addIntrinsicID(ID); 393 return MIB; 394 } 395 396 MachineInstrBuilder MachineIRBuilder::buildTrunc(unsigned Res, unsigned Op) { 397 validateTruncExt(Res, Op, false); 398 return buildInstr(TargetOpcode::G_TRUNC).addDef(Res).addUse(Op); 399 } 400 401 MachineInstrBuilder MachineIRBuilder::buildFPTrunc(unsigned Res, unsigned Op) { 402 validateTruncExt(Res, Op, false); 403 return buildInstr(TargetOpcode::G_FPTRUNC).addDef(Res).addUse(Op); 404 } 405 406 MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred, 407 unsigned Res, unsigned Op0, 408 unsigned Op1) { 409 #ifndef NDEBUG 410 assert(MRI->getType(Op0) == MRI->getType(Op0) && "type mismatch"); 411 assert(CmpInst::isIntPredicate(Pred) && "invalid predicate"); 412 if (MRI->getType(Op0).isScalar() || MRI->getType(Op0).isPointer()) 413 assert(MRI->getType(Res).isScalar() && "type mismatch"); 414 else 415 assert(MRI->getType(Res).isVector() && 416 MRI->getType(Res).getNumElements() == 417 MRI->getType(Op0).getNumElements() && 418 "type mismatch"); 419 #endif 420 421 return buildInstr(TargetOpcode::G_ICMP) 422 .addDef(Res) 423 .addPredicate(Pred) 424 .addUse(Op0) 425 .addUse(Op1); 426 } 427 428 MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred, 429 unsigned Res, unsigned Op0, 430 unsigned Op1) { 431 #ifndef NDEBUG 432 assert((MRI->getType(Op0).isScalar() || MRI->getType(Op0).isVector()) && 433 "invalid operand type"); 434 assert(MRI->getType(Op0) == MRI->getType(Op1) && "type mismatch"); 435 assert(CmpInst::isFPPredicate(Pred) && "invalid predicate"); 436 if (MRI->getType(Op0).isScalar()) 437 assert(MRI->getType(Res).isScalar() && "type mismatch"); 438 else 439 assert(MRI->getType(Res).isVector() && 440 MRI->getType(Res).getNumElements() == 441 MRI->getType(Op0).getNumElements() && 442 "type mismatch"); 443 #endif 444 445 return buildInstr(TargetOpcode::G_FCMP) 446 .addDef(Res) 447 .addPredicate(Pred) 448 .addUse(Op0) 449 .addUse(Op1); 450 } 451 452 MachineInstrBuilder MachineIRBuilder::buildSelect(unsigned Res, unsigned Tst, 453 unsigned Op0, unsigned Op1) { 454 #ifndef NDEBUG 455 LLT ResTy = MRI->getType(Res); 456 assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) && 457 "invalid operand type"); 458 assert(ResTy == MRI->getType(Op0) && ResTy == MRI->getType(Op1) && 459 "type mismatch"); 460 if (ResTy.isScalar() || ResTy.isPointer()) 461 assert(MRI->getType(Tst).isScalar() && "type mismatch"); 462 else 463 assert(MRI->getType(Tst).isVector() && 464 MRI->getType(Tst).getNumElements() == 465 MRI->getType(Op0).getNumElements() && 466 "type mismatch"); 467 #endif 468 469 return buildInstr(TargetOpcode::G_SELECT) 470 .addDef(Res) 471 .addUse(Tst) 472 .addUse(Op0) 473 .addUse(Op1); 474 } 475 476 void MachineIRBuilder::validateTruncExt(unsigned Dst, unsigned Src, 477 bool IsExtend) { 478 #ifndef NDEBUG 479 LLT SrcTy = MRI->getType(Src); 480 LLT DstTy = MRI->getType(Dst); 481 482 if (DstTy.isVector()) { 483 assert(SrcTy.isVector() && "mismatched cast between vecot and non-vector"); 484 assert(SrcTy.getNumElements() == DstTy.getNumElements() && 485 "different number of elements in a trunc/ext"); 486 } else 487 assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc"); 488 489 if (IsExtend) 490 assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() && 491 "invalid narrowing extend"); 492 else 493 assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() && 494 "invalid widening trunc"); 495 #endif 496 } 497