//===-- TargetInstrInfo.cpp - Target Instruction Information --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/ScoreboardHazardRecognizer.h"
#include "llvm/CodeGen/StackMaps.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSchedule.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCInstrItineraries.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include <cctype>

using namespace llvm;

static cl::opt<bool> DisableHazardRecognizer(
  "disable-sched-hazard", cl::Hidden, cl::init(false),
  cl::desc("Disable hazard detection during preRA scheduling"));

TargetInstrInfo::~TargetInstrInfo() {
}

const TargetRegisterClass*
TargetInstrInfo::getRegClass(const MCInstrDesc &MCID, unsigned OpNum,
                             const TargetRegisterInfo *TRI,
                             const MachineFunction &MF) const {
  if (OpNum >= MCID.getNumOperands())
    return nullptr;

  short RegClass = MCID.OpInfo[OpNum].RegClass;
  if (MCID.OpInfo[OpNum].isLookupPtrRegClass())
    return TRI->getPointerRegClass(MF, RegClass);

  // Instructions like INSERT_SUBREG do not have fixed register classes.
  if (RegClass < 0)
    return nullptr;

  // Otherwise just look it up normally.
  return TRI->getRegClass(RegClass);
}

/// insertNoop - Insert a noop into the instruction stream at the specified
/// point.
void TargetInstrInfo::insertNoop(MachineBasicBlock &MBB,
                                 MachineBasicBlock::iterator MI) const {
  llvm_unreachable("Target didn't implement insertNoop!");
}

static bool isAsmComment(const char *Str, const MCAsmInfo &MAI) {
  return strncmp(Str, MAI.getCommentString().data(),
                 MAI.getCommentString().size()) == 0;
}

/// Measure the specified inline asm to determine an approximation of its
/// length.
/// Comments (which run till the next SeparatorString or newline) do not
/// count as an instruction.
/// Any other non-whitespace text is considered an instruction, with
/// multiple instructions separated by SeparatorString or newlines.
/// Variable-length instructions are not handled here; this function
/// may be overridden in the target code to do that.
/// We implement a special case of the .space directive which takes only a
/// single integer argument in base 10 that is the size in bytes. This is a
/// restricted form of the GAS directive in that we only interpret
/// simple--i.e. not a logical or arithmetic expression--size values without
/// the optional fill value. This is primarily used for creating arbitrary
/// sized inline asm blocks for testing purposes.
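//
// Illustrative example (the instruction length and comment syntax used here
// are target-dependent assumptions, not fixed values): assuming
// MAI.getMaxInstLength() == 4 and "#" as the comment string, the string
//   "movl %eax, %ebx\n.space 16\n# note\naddl %ebx, %eax"
// is measured as 4 + 16 + 4 = 24 bytes: each textual instruction is charged
// MaxInstLength, the ".space 16" contributes its literal byte count, and the
// comment line contributes nothing.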
unsigned TargetInstrInfo::getInlineAsmLength(const char *Str,
                                             const MCAsmInfo &MAI) const {
  // Count the number of instructions in the asm.
  bool AtInsnStart = true;
  unsigned Length = 0;
  for (; *Str; ++Str) {
    if (*Str == '\n' || strncmp(Str, MAI.getSeparatorString(),
                                strlen(MAI.getSeparatorString())) == 0) {
      AtInsnStart = true;
    } else if (isAsmComment(Str, MAI)) {
      // Stop counting as an instruction after a comment until the next
      // separator.
      AtInsnStart = false;
    }

    if (AtInsnStart && !std::isspace(static_cast<unsigned char>(*Str))) {
      unsigned AddLength = MAI.getMaxInstLength();
      if (strncmp(Str, ".space", 6) == 0) {
        char *EStr;
        int SpaceSize;
        SpaceSize = strtol(Str + 6, &EStr, 10);
        SpaceSize = SpaceSize < 0 ? 0 : SpaceSize;
        while (*EStr != '\n' && std::isspace(static_cast<unsigned char>(*EStr)))
          ++EStr;
        if (*EStr == '\0' || *EStr == '\n' ||
            isAsmComment(EStr, MAI)) // Successfully parsed .space argument
          AddLength = SpaceSize;
      }
      Length += AddLength;
      AtInsnStart = false;
    }
  }

  return Length;
}

/// ReplaceTailWithBranchTo - Delete the instruction OldInst and everything
/// after it, replacing it with an unconditional branch to NewDest.
void
TargetInstrInfo::ReplaceTailWithBranchTo(MachineBasicBlock::iterator Tail,
                                         MachineBasicBlock *NewDest) const {
  MachineBasicBlock *MBB = Tail->getParent();

  // Remove all the old successors of MBB from the CFG.
  while (!MBB->succ_empty())
    MBB->removeSuccessor(MBB->succ_begin());

  // Save off the debug loc before erasing the instruction.
  DebugLoc DL = Tail->getDebugLoc();

  // Remove all the dead instructions from the end of MBB.
  MBB->erase(Tail, MBB->end());

  // If MBB isn't immediately before NewDest, insert a branch to it.
  if (++MachineFunction::iterator(MBB) != MachineFunction::iterator(NewDest))
    insertBranch(*MBB, NewDest, nullptr, SmallVector<MachineOperand, 0>(), DL);
  MBB->addSuccessor(NewDest);
}

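// The default commuteInstructionImpl swaps the register operands at Idx1 and
// Idx2, either in place or (when NewMI is true) in a cloned instruction. In
// addition to the registers it swaps the sub-register indices and the
// kill/undef/internal-read/renamable flags, and it retargets the tied def
// when the destination register was tied to one of the commuted operands.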
MachineInstr *TargetInstrInfo::commuteInstructionImpl(MachineInstr &MI,
                                                      bool NewMI, unsigned Idx1,
                                                      unsigned Idx2) const {
  const MCInstrDesc &MCID = MI.getDesc();
  bool HasDef = MCID.getNumDefs();
  if (HasDef && !MI.getOperand(0).isReg())
    // No idea how to commute this instruction. Target should implement its own.
    return nullptr;

  unsigned CommutableOpIdx1 = Idx1; (void)CommutableOpIdx1;
  unsigned CommutableOpIdx2 = Idx2; (void)CommutableOpIdx2;
  assert(findCommutedOpIndices(MI, CommutableOpIdx1, CommutableOpIdx2) &&
         CommutableOpIdx1 == Idx1 && CommutableOpIdx2 == Idx2 &&
         "TargetInstrInfo::CommuteInstructionImpl(): not commutable operands.");
  assert(MI.getOperand(Idx1).isReg() && MI.getOperand(Idx2).isReg() &&
         "This only knows how to commute register operands so far");

  unsigned Reg0 = HasDef ? MI.getOperand(0).getReg() : 0;
  unsigned Reg1 = MI.getOperand(Idx1).getReg();
  unsigned Reg2 = MI.getOperand(Idx2).getReg();
  unsigned SubReg0 = HasDef ? MI.getOperand(0).getSubReg() : 0;
  unsigned SubReg1 = MI.getOperand(Idx1).getSubReg();
  unsigned SubReg2 = MI.getOperand(Idx2).getSubReg();
  bool Reg1IsKill = MI.getOperand(Idx1).isKill();
  bool Reg2IsKill = MI.getOperand(Idx2).isKill();
  bool Reg1IsUndef = MI.getOperand(Idx1).isUndef();
  bool Reg2IsUndef = MI.getOperand(Idx2).isUndef();
  bool Reg1IsInternal = MI.getOperand(Idx1).isInternalRead();
  bool Reg2IsInternal = MI.getOperand(Idx2).isInternalRead();
  // Avoid calling isRenamable for virtual registers since we assert that
  // renamable property is only queried/set for physical registers.
  bool Reg1IsRenamable = TargetRegisterInfo::isPhysicalRegister(Reg1)
                             ? MI.getOperand(Idx1).isRenamable()
                             : false;
  bool Reg2IsRenamable = TargetRegisterInfo::isPhysicalRegister(Reg2)
                             ? MI.getOperand(Idx2).isRenamable()
                             : false;
  // If destination is tied to either of the commuted source registers, then
  // it must be updated.
  if (HasDef && Reg0 == Reg1 &&
      MI.getDesc().getOperandConstraint(Idx1, MCOI::TIED_TO) == 0) {
    Reg2IsKill = false;
    Reg0 = Reg2;
    SubReg0 = SubReg2;
  } else if (HasDef && Reg0 == Reg2 &&
             MI.getDesc().getOperandConstraint(Idx2, MCOI::TIED_TO) == 0) {
    Reg1IsKill = false;
    Reg0 = Reg1;
    SubReg0 = SubReg1;
  }

  MachineInstr *CommutedMI = nullptr;
  if (NewMI) {
    // Create a new instruction.
    MachineFunction &MF = *MI.getMF();
    CommutedMI = MF.CloneMachineInstr(&MI);
  } else {
    CommutedMI = &MI;
  }

  if (HasDef) {
    CommutedMI->getOperand(0).setReg(Reg0);
    CommutedMI->getOperand(0).setSubReg(SubReg0);
  }
  CommutedMI->getOperand(Idx2).setReg(Reg1);
  CommutedMI->getOperand(Idx1).setReg(Reg2);
  CommutedMI->getOperand(Idx2).setSubReg(SubReg1);
  CommutedMI->getOperand(Idx1).setSubReg(SubReg2);
  CommutedMI->getOperand(Idx2).setIsKill(Reg1IsKill);
  CommutedMI->getOperand(Idx1).setIsKill(Reg2IsKill);
  CommutedMI->getOperand(Idx2).setIsUndef(Reg1IsUndef);
  CommutedMI->getOperand(Idx1).setIsUndef(Reg2IsUndef);
  CommutedMI->getOperand(Idx2).setIsInternalRead(Reg1IsInternal);
  CommutedMI->getOperand(Idx1).setIsInternalRead(Reg2IsInternal);
  // Avoid calling setIsRenamable for virtual registers since we assert that
  // renamable property is only queried/set for physical registers.
  if (TargetRegisterInfo::isPhysicalRegister(Reg1))
    CommutedMI->getOperand(Idx2).setIsRenamable(Reg1IsRenamable);
  if (TargetRegisterInfo::isPhysicalRegister(Reg2))
    CommutedMI->getOperand(Idx1).setIsRenamable(Reg2IsRenamable);
  return CommutedMI;
}

MachineInstr *TargetInstrInfo::commuteInstruction(MachineInstr &MI, bool NewMI,
                                                  unsigned OpIdx1,
                                                  unsigned OpIdx2) const {
  // If OpIdx1 or OpIdx2 is not specified, then this method is free to choose
  // any commutable operand, which is done in findCommutedOpIndices() method
  // called below.
  if ((OpIdx1 == CommuteAnyOperandIndex || OpIdx2 == CommuteAnyOperandIndex) &&
      !findCommutedOpIndices(MI, OpIdx1, OpIdx2)) {
    assert(MI.isCommutable() &&
           "Precondition violation: MI must be commutable.");
    return nullptr;
  }
  return commuteInstructionImpl(MI, NewMI, OpIdx1, OpIdx2);
}

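// fixCommutedOpIndices resolves the caller-supplied result indices against
// the pair of operands that is actually commutable: a result index equal to
// CommuteAnyOperandIndex acts as a wildcard and is bound to whichever of
// CommutableOpIdx1/CommutableOpIdx2 is still free, while fully specified
// indices are accepted only if they name exactly that pair.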
bool TargetInstrInfo::fixCommutedOpIndices(unsigned &ResultIdx1,
                                           unsigned &ResultIdx2,
                                           unsigned CommutableOpIdx1,
                                           unsigned CommutableOpIdx2) {
  if (ResultIdx1 == CommuteAnyOperandIndex &&
      ResultIdx2 == CommuteAnyOperandIndex) {
    ResultIdx1 = CommutableOpIdx1;
    ResultIdx2 = CommutableOpIdx2;
  } else if (ResultIdx1 == CommuteAnyOperandIndex) {
    if (ResultIdx2 == CommutableOpIdx1)
      ResultIdx1 = CommutableOpIdx2;
    else if (ResultIdx2 == CommutableOpIdx2)
      ResultIdx1 = CommutableOpIdx1;
    else
      return false;
  } else if (ResultIdx2 == CommuteAnyOperandIndex) {
    if (ResultIdx1 == CommutableOpIdx1)
      ResultIdx2 = CommutableOpIdx2;
    else if (ResultIdx1 == CommutableOpIdx2)
      ResultIdx2 = CommutableOpIdx1;
    else
      return false;
  } else
    // Check that the result operand indices match the given commutable
    // operand indices.
    return (ResultIdx1 == CommutableOpIdx1 && ResultIdx2 == CommutableOpIdx2) ||
           (ResultIdx1 == CommutableOpIdx2 && ResultIdx2 == CommutableOpIdx1);

  return true;
}

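// The default findCommutedOpIndices assumes the canonical three-address form
// "v0 = op v1, v2", so the commutable pair is taken to be the first two
// operands after the defs (NumDefs and NumDefs + 1); for a single-def
// instruction that typically resolves to SrcOpIdx1 == 1 and SrcOpIdx2 == 2.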
bool TargetInstrInfo::findCommutedOpIndices(MachineInstr &MI,
                                            unsigned &SrcOpIdx1,
                                            unsigned &SrcOpIdx2) const {
  assert(!MI.isBundle() &&
         "TargetInstrInfo::findCommutedOpIndices() can't handle bundles");

  const MCInstrDesc &MCID = MI.getDesc();
  if (!MCID.isCommutable())
    return false;

  // This assumes v0 = op v1, v2 and commuting would swap v1 and v2. If this
  // is not true, then the target must implement this.
  unsigned CommutableOpIdx1 = MCID.getNumDefs();
  unsigned CommutableOpIdx2 = CommutableOpIdx1 + 1;
  if (!fixCommutedOpIndices(SrcOpIdx1, SrcOpIdx2,
                            CommutableOpIdx1, CommutableOpIdx2))
    return false;

  if (!MI.getOperand(SrcOpIdx1).isReg() || !MI.getOperand(SrcOpIdx2).isReg())
    // No idea.
    return false;
  return true;
}

bool TargetInstrInfo::isUnpredicatedTerminator(const MachineInstr &MI) const {
  if (!MI.isTerminator()) return false;

  // Conditional branch is a special case.
  if (MI.isBranch() && !MI.isBarrier())
    return true;
  if (!MI.isPredicable())
    return true;
  return !isPredicated(MI);
}

bool TargetInstrInfo::PredicateInstruction(
    MachineInstr &MI, ArrayRef<MachineOperand> Pred) const {
  bool MadeChange = false;

  assert(!MI.isBundle() &&
         "TargetInstrInfo::PredicateInstruction() can't handle bundles");

  const MCInstrDesc &MCID = MI.getDesc();
  if (!MI.isPredicable())
    return false;

  for (unsigned j = 0, i = 0, e = MI.getNumOperands(); i != e; ++i) {
    if (MCID.OpInfo[i].isPredicate()) {
      MachineOperand &MO = MI.getOperand(i);
      if (MO.isReg()) {
        MO.setReg(Pred[j].getReg());
        MadeChange = true;
      } else if (MO.isImm()) {
        MO.setImm(Pred[j].getImm());
        MadeChange = true;
      } else if (MO.isMBB()) {
        MO.setMBB(Pred[j].getMBB());
        MadeChange = true;
      }
      ++j;
    }
  }
  return MadeChange;
}

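// hasLoadFromStackSlot and hasStoreToStackSlot below scan MI's memory
// operands for accesses to fixed stack objects (FixedStackPseudoSourceValue),
// append any matches to Accesses, and return true iff at least one new entry
// was added.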
bool TargetInstrInfo::hasLoadFromStackSlot(
    const MachineInstr &MI,
    SmallVectorImpl<const MachineMemOperand *> &Accesses) const {
  size_t StartSize = Accesses.size();
  for (MachineInstr::mmo_iterator o = MI.memoperands_begin(),
                                  oe = MI.memoperands_end();
       o != oe; ++o) {
    if ((*o)->isLoad() &&
        dyn_cast_or_null<FixedStackPseudoSourceValue>((*o)->getPseudoValue()))
      Accesses.push_back(*o);
  }
  return Accesses.size() != StartSize;
}

bool TargetInstrInfo::hasStoreToStackSlot(
    const MachineInstr &MI,
    SmallVectorImpl<const MachineMemOperand *> &Accesses) const {
  size_t StartSize = Accesses.size();
  for (MachineInstr::mmo_iterator o = MI.memoperands_begin(),
                                  oe = MI.memoperands_end();
       o != oe; ++o) {
    if ((*o)->isStore() &&
        dyn_cast_or_null<FixedStackPseudoSourceValue>((*o)->getPseudoValue()))
      Accesses.push_back(*o);
  }
  return Accesses.size() != StartSize;
}

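// getStackSlotRange computes the byte size and byte offset of a sub-register
// within its register's spill slot. Illustrative example with hypothetical
// numbers: for a register class whose spill size is 16 bytes and a
// sub-register index covering bits [64, 128), Size becomes 8 and Offset 8 on
// a little-endian target; on a big-endian target the offset is mirrored to
// 16 - (8 + 8) == 0.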
bool TargetInstrInfo::getStackSlotRange(const TargetRegisterClass *RC,
                                        unsigned SubIdx, unsigned &Size,
                                        unsigned &Offset,
                                        const MachineFunction &MF) const {
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
  if (!SubIdx) {
    Size = TRI->getSpillSize(*RC);
    Offset = 0;
    return true;
  }
  unsigned BitSize = TRI->getSubRegIdxSize(SubIdx);
  // Convert bit size to byte size.
  if (BitSize % 8)
    return false;

  int BitOffset = TRI->getSubRegIdxOffset(SubIdx);
  if (BitOffset < 0 || BitOffset % 8)
    return false;

  Size = BitSize /= 8;
  Offset = (unsigned)BitOffset / 8;

  assert(TRI->getSpillSize(*RC) >= (Offset + Size) && "bad subregister range");

  if (!MF.getDataLayout().isLittleEndian()) {
    Offset = TRI->getSpillSize(*RC) - (Offset + Size);
  }
  return true;
}

void TargetInstrInfo::reMaterialize(MachineBasicBlock &MBB,
                                    MachineBasicBlock::iterator I,
                                    unsigned DestReg, unsigned SubIdx,
                                    const MachineInstr &Orig,
                                    const TargetRegisterInfo &TRI) const {
  MachineInstr *MI = MBB.getParent()->CloneMachineInstr(&Orig);
  MI->substituteRegister(MI->getOperand(0).getReg(), DestReg, SubIdx, TRI);
  MBB.insert(I, MI);
}

bool TargetInstrInfo::produceSameValue(const MachineInstr &MI0,
                                       const MachineInstr &MI1,
                                       const MachineRegisterInfo *MRI) const {
  return MI0.isIdenticalTo(MI1, MachineInstr::IgnoreVRegDefs);
}

MachineInstr &TargetInstrInfo::duplicate(MachineBasicBlock &MBB,
    MachineBasicBlock::iterator InsertBefore, const MachineInstr &Orig) const {
  assert(!Orig.isNotDuplicable() && "Instruction cannot be duplicated");
  MachineFunction &MF = *MBB.getParent();
  return MF.CloneMachineInstrBundle(MBB, InsertBefore, Orig);
}

// If the COPY instruction in MI can be folded to a stack operation, return
// the register class to use.
static const TargetRegisterClass *canFoldCopy(const MachineInstr &MI,
                                              unsigned FoldIdx) {
  assert(MI.isCopy() && "MI must be a COPY instruction");
  if (MI.getNumOperands() != 2)
    return nullptr;
  assert(FoldIdx < 2 && "FoldIdx refers to a nonexistent operand");

  const MachineOperand &FoldOp = MI.getOperand(FoldIdx);
  const MachineOperand &LiveOp = MI.getOperand(1 - FoldIdx);

  if (FoldOp.getSubReg() || LiveOp.getSubReg())
    return nullptr;

  unsigned FoldReg = FoldOp.getReg();
  unsigned LiveReg = LiveOp.getReg();

  assert(TargetRegisterInfo::isVirtualRegister(FoldReg) &&
         "Cannot fold physregs");

  const MachineRegisterInfo &MRI = MI.getMF()->getRegInfo();
  const TargetRegisterClass *RC = MRI.getRegClass(FoldReg);

  if (TargetRegisterInfo::isPhysicalRegister(LiveOp.getReg()))
    return RC->contains(LiveOp.getReg()) ? RC : nullptr;

  if (RC->hasSubClassEq(MRI.getRegClass(LiveReg)))
    return RC;

  // FIXME: Allow folding when register classes are memory compatible.
  return nullptr;
}

void TargetInstrInfo::getNoop(MCInst &NopInst) const {
  llvm_unreachable("Not implemented");
}

static MachineInstr *foldPatchpoint(MachineFunction &MF, MachineInstr &MI,
                                    ArrayRef<unsigned> Ops, int FrameIndex,
                                    const TargetInstrInfo &TII) {
  unsigned StartIdx = 0;
  switch (MI.getOpcode()) {
  case TargetOpcode::STACKMAP: {
    // StackMapLiveValues are foldable
    StartIdx = StackMapOpers(&MI).getVarIdx();
    break;
  }
  case TargetOpcode::PATCHPOINT: {
    // For PatchPoint, the call args are not foldable (even if reported in the
    // stackmap e.g. via anyregcc).
    StartIdx = PatchPointOpers(&MI).getVarIdx();
    break;
  }
  case TargetOpcode::STATEPOINT: {
    // For statepoints, fold deopt and gc arguments, but not call arguments.
    StartIdx = StatepointOpers(&MI).getVarIdx();
    break;
  }
  default:
    llvm_unreachable("unexpected stackmap opcode");
  }

  // Return nullptr if any operands requested for folding are not foldable (not
  // part of the stackmap's live values).
  for (unsigned Op : Ops) {
    if (Op < StartIdx)
      return nullptr;
  }

  MachineInstr *NewMI =
      MF.CreateMachineInstr(TII.get(MI.getOpcode()), MI.getDebugLoc(), true);
  MachineInstrBuilder MIB(MF, NewMI);

  // No need to fold the return value, the metadata, or the function arguments.
  for (unsigned i = 0; i < StartIdx; ++i)
    MIB.add(MI.getOperand(i));

  for (unsigned i = StartIdx; i < MI.getNumOperands(); ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (is_contained(Ops, i)) {
      unsigned SpillSize;
      unsigned SpillOffset;
      // Compute the spill slot size and offset.
      const TargetRegisterClass *RC =
        MF.getRegInfo().getRegClass(MO.getReg());
      bool Valid =
          TII.getStackSlotRange(RC, MO.getSubReg(), SpillSize, SpillOffset, MF);
      if (!Valid)
        report_fatal_error("cannot spill patchpoint subregister operand");
      MIB.addImm(StackMaps::IndirectMemRefOp);
      MIB.addImm(SpillSize);
      MIB.addFrameIndex(FrameIndex);
      MIB.addImm(SpillOffset);
    }
    else
      MIB.add(MO);
  }
  return NewMI;
}

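// foldMemoryOperand tries three strategies in order: STACKMAP, PATCHPOINT and
// STATEPOINT instructions are folded via foldPatchpoint; otherwise the
// target's foldMemoryOperandImpl hook is asked to do the folding; and as a
// last resort a plain COPY with a single folded operand is turned into a
// direct spill or reload (storeRegToStackSlot / loadRegFromStackSlot) of the
// copied register.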
MachineInstr *TargetInstrInfo::foldMemoryOperand(MachineInstr &MI,
                                                 ArrayRef<unsigned> Ops, int FI,
                                                 LiveIntervals *LIS) const {
  auto Flags = MachineMemOperand::MONone;
  for (unsigned OpIdx : Ops)
    Flags |= MI.getOperand(OpIdx).isDef() ? MachineMemOperand::MOStore
                                          : MachineMemOperand::MOLoad;

  MachineBasicBlock *MBB = MI.getParent();
  assert(MBB && "foldMemoryOperand needs an inserted instruction");
  MachineFunction &MF = *MBB->getParent();

  // If we're not folding a load into a subreg, the size of the load is the
  // size of the spill slot. But if we are, we need to figure out what the
  // actual load size is.
  int64_t MemSize = 0;
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();

  if (Flags & MachineMemOperand::MOStore) {
    MemSize = MFI.getObjectSize(FI);
  } else {
    for (unsigned OpIdx : Ops) {
      int64_t OpSize = MFI.getObjectSize(FI);

      if (auto SubReg = MI.getOperand(OpIdx).getSubReg()) {
        unsigned SubRegSize = TRI->getSubRegIdxSize(SubReg);
        if (SubRegSize > 0 && !(SubRegSize % 8))
          OpSize = SubRegSize / 8;
      }

      MemSize = std::max(MemSize, OpSize);
    }
  }

  assert(MemSize && "Did not expect a zero-sized stack slot");

  MachineInstr *NewMI = nullptr;

  if (MI.getOpcode() == TargetOpcode::STACKMAP ||
      MI.getOpcode() == TargetOpcode::PATCHPOINT ||
      MI.getOpcode() == TargetOpcode::STATEPOINT) {
    // Fold stackmap/patchpoint.
    NewMI = foldPatchpoint(MF, MI, Ops, FI, *this);
    if (NewMI)
      MBB->insert(MI, NewMI);
  } else {
    // Ask the target to do the actual folding.
    NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, FI, LIS);
  }

  if (NewMI) {
    NewMI->setMemRefs(MF, MI.memoperands());
    // Add a memory operand, foldMemoryOperandImpl doesn't do that.
    assert((!(Flags & MachineMemOperand::MOStore) ||
            NewMI->mayStore()) &&
           "Folded a def to a non-store!");
    assert((!(Flags & MachineMemOperand::MOLoad) ||
            NewMI->mayLoad()) &&
           "Folded a use to a non-load!");
    assert(MFI.getObjectOffset(FI) != -1);
    MachineMemOperand *MMO = MF.getMachineMemOperand(
        MachinePointerInfo::getFixedStack(MF, FI), Flags, MemSize,
        MFI.getObjectAlignment(FI));
    NewMI->addMemOperand(MF, MMO);

    return NewMI;
  }

  // Straight COPY may fold as load/store.
  if (!MI.isCopy() || Ops.size() != 1)
    return nullptr;

  const TargetRegisterClass *RC = canFoldCopy(MI, Ops[0]);
  if (!RC)
    return nullptr;

  const MachineOperand &MO = MI.getOperand(1 - Ops[0]);
  MachineBasicBlock::iterator Pos = MI;

  if (Flags == MachineMemOperand::MOStore)
    storeRegToStackSlot(*MBB, Pos, MO.getReg(), MO.isKill(), FI, RC, TRI);
  else
    loadRegFromStackSlot(*MBB, Pos, MO.getReg(), FI, RC, TRI);
  return &*--Pos;
}

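// This overload folds a foldable load (LoadMI) directly into MI instead of a
// frame index: stackmap-like instructions go through foldPatchpoint when
// LoadMI is itself a stack-slot load, everything else goes through the
// target's foldMemoryOperandImpl, and the load's memory operands are then
// copied onto the new instruction.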
MachineInstr *TargetInstrInfo::foldMemoryOperand(MachineInstr &MI,
                                                 ArrayRef<unsigned> Ops,
                                                 MachineInstr &LoadMI,
                                                 LiveIntervals *LIS) const {
  assert(LoadMI.canFoldAsLoad() && "LoadMI isn't foldable!");
#ifndef NDEBUG
  for (unsigned OpIdx : Ops)
    assert(MI.getOperand(OpIdx).isUse() && "Folding load into def!");
#endif

  MachineBasicBlock &MBB = *MI.getParent();
  MachineFunction &MF = *MBB.getParent();

  // Ask the target to do the actual folding.
  MachineInstr *NewMI = nullptr;
  int FrameIndex = 0;

  if ((MI.getOpcode() == TargetOpcode::STACKMAP ||
       MI.getOpcode() == TargetOpcode::PATCHPOINT ||
       MI.getOpcode() == TargetOpcode::STATEPOINT) &&
      isLoadFromStackSlot(LoadMI, FrameIndex)) {
    // Fold stackmap/patchpoint.
    NewMI = foldPatchpoint(MF, MI, Ops, FrameIndex, *this);
    if (NewMI)
      NewMI = &*MBB.insert(MI, NewMI);
  } else {
    // Ask the target to do the actual folding.
    NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI, LIS);
  }

  if (!NewMI)
    return nullptr;

  // Copy the memoperands from the load to the folded instruction.
  if (MI.memoperands_empty()) {
    NewMI->setMemRefs(MF, LoadMI.memoperands());
  } else {
    // Handle the rare case of folding multiple loads.
    NewMI->setMemRefs(MF, MI.memoperands());
    for (MachineInstr::mmo_iterator I = LoadMI.memoperands_begin(),
                                    E = LoadMI.memoperands_end();
         I != E; ++I) {
      NewMI->addMemOperand(MF, *I);
    }
  }
  return NewMI;
}

bool TargetInstrInfo::hasReassociableOperands(
    const MachineInstr &Inst, const MachineBasicBlock *MBB) const {
  const MachineOperand &Op1 = Inst.getOperand(1);
  const MachineOperand &Op2 = Inst.getOperand(2);
  const MachineRegisterInfo &MRI = MBB->getParent()->getRegInfo();

  // We need virtual register definitions for the operands that we will
  // reassociate.
  MachineInstr *MI1 = nullptr;
  MachineInstr *MI2 = nullptr;
  if (Op1.isReg() && TargetRegisterInfo::isVirtualRegister(Op1.getReg()))
    MI1 = MRI.getUniqueVRegDef(Op1.getReg());
  if (Op2.isReg() && TargetRegisterInfo::isVirtualRegister(Op2.getReg()))
    MI2 = MRI.getUniqueVRegDef(Op2.getReg());

  // And they need to be in the trace (otherwise, they won't have a depth).
  return MI1 && MI2 && MI1->getParent() == MBB && MI2->getParent() == MBB;
}

bool TargetInstrInfo::hasReassociableSibling(const MachineInstr &Inst,
                                             bool &Commuted) const {
  const MachineBasicBlock *MBB = Inst.getParent();
  const MachineRegisterInfo &MRI = MBB->getParent()->getRegInfo();
  MachineInstr *MI1 = MRI.getUniqueVRegDef(Inst.getOperand(1).getReg());
  MachineInstr *MI2 = MRI.getUniqueVRegDef(Inst.getOperand(2).getReg());
  unsigned AssocOpcode = Inst.getOpcode();

  // If only one operand has the same opcode and it's the second source operand,
  // the operands must be commuted.
  Commuted = MI1->getOpcode() != AssocOpcode && MI2->getOpcode() == AssocOpcode;
  if (Commuted)
    std::swap(MI1, MI2);

  // 1. The previous instruction must be the same type as Inst.
  // 2. The previous instruction must have virtual register definitions for its
  //    operands in the same basic block as Inst.
  // 3. The previous instruction's result must only be used by Inst.
  return MI1->getOpcode() == AssocOpcode &&
         hasReassociableOperands(*MI1, MBB) &&
         MRI.hasOneNonDBGUse(MI1->getOperand(0).getReg());
}

// 1. The operation must be associative and commutative.
// 2. The instruction must have virtual register definitions for its
//    operands in the same basic block.
// 3. The instruction must have a reassociable sibling.
bool TargetInstrInfo::isReassociationCandidate(const MachineInstr &Inst,
                                               bool &Commuted) const {
  return isAssociativeAndCommutative(Inst) &&
         hasReassociableOperands(Inst, Inst.getParent()) &&
         hasReassociableSibling(Inst, Commuted);
}

// The concept of the reassociation pass is that these operations can benefit
// from this kind of transformation:
//
// A = ? op ?
// B = A op X (Prev)
// C = B op Y (Root)
// -->
// A = ? op ?
// B = X op Y
// C = A op B
//
// breaking the dependency between A and B, allowing them to be executed in
// parallel (or back-to-back in a pipeline) instead of depending on each other.

// FIXME: This has the potential to be expensive (compile time) while not
// improving the code at all. Some ways to limit the overhead:
// 1. Track successful transforms; bail out if hit rate gets too low.
// 2. Only enable at -O3 or some other non-default optimization level.
// 3. Pre-screen pattern candidates here: if an operand of the previous
//    instruction is known to not increase the critical path, then don't match
//    that pattern.
bool TargetInstrInfo::getMachineCombinerPatterns(
    MachineInstr &Root,
    SmallVectorImpl<MachineCombinerPattern> &Patterns) const {
  bool Commute;
  if (isReassociationCandidate(Root, Commute)) {
    // We found a sequence of instructions that may be suitable for a
    // reassociation of operands to increase ILP. Specify each commutation
    // possibility for the Prev instruction in the sequence and let the
    // machine combiner decide if changing the operands is worthwhile.
    if (Commute) {
      Patterns.push_back(MachineCombinerPattern::REASSOC_AX_YB);
      Patterns.push_back(MachineCombinerPattern::REASSOC_XA_YB);
    } else {
      Patterns.push_back(MachineCombinerPattern::REASSOC_AX_BY);
      Patterns.push_back(MachineCombinerPattern::REASSOC_XA_BY);
    }
    return true;
  }

  return false;
}

/// Return true when a code sequence can improve loop throughput.
bool
TargetInstrInfo::isThroughputPattern(MachineCombinerPattern Pattern) const {
  return false;
}

/// Attempt the reassociation transformation to reduce critical path length.
/// See the above comments before getMachineCombinerPatterns().
void TargetInstrInfo::reassociateOps(
    MachineInstr &Root, MachineInstr &Prev,
    MachineCombinerPattern Pattern,
    SmallVectorImpl<MachineInstr *> &InsInstrs,
    SmallVectorImpl<MachineInstr *> &DelInstrs,
    DenseMap<unsigned, unsigned> &InstrIdxForVirtReg) const {
  MachineFunction *MF = Root.getMF();
  MachineRegisterInfo &MRI = MF->getRegInfo();
  const TargetInstrInfo *TII = MF->getSubtarget().getInstrInfo();
  const TargetRegisterInfo *TRI = MF->getSubtarget().getRegisterInfo();
  const TargetRegisterClass *RC = Root.getRegClassConstraint(0, TII, TRI);

  // This array encodes the operand index for each parameter because the
  // operands may be commuted. Each row corresponds to a pattern value,
  // and each column specifies the index of A, B, X, Y.
  unsigned OpIdx[4][4] = {
    { 1, 1, 2, 2 },
    { 1, 2, 2, 1 },
    { 2, 1, 1, 2 },
    { 2, 2, 1, 1 }
  };

  int Row;
  switch (Pattern) {
  case MachineCombinerPattern::REASSOC_AX_BY: Row = 0; break;
  case MachineCombinerPattern::REASSOC_AX_YB: Row = 1; break;
  case MachineCombinerPattern::REASSOC_XA_BY: Row = 2; break;
  case MachineCombinerPattern::REASSOC_XA_YB: Row = 3; break;
  default: llvm_unreachable("unexpected MachineCombinerPattern");
  }

  MachineOperand &OpA = Prev.getOperand(OpIdx[Row][0]);
  MachineOperand &OpB = Root.getOperand(OpIdx[Row][1]);
  MachineOperand &OpX = Prev.getOperand(OpIdx[Row][2]);
  MachineOperand &OpY = Root.getOperand(OpIdx[Row][3]);
  MachineOperand &OpC = Root.getOperand(0);

  unsigned RegA = OpA.getReg();
  unsigned RegB = OpB.getReg();
  unsigned RegX = OpX.getReg();
  unsigned RegY = OpY.getReg();
  unsigned RegC = OpC.getReg();

  if (TargetRegisterInfo::isVirtualRegister(RegA))
    MRI.constrainRegClass(RegA, RC);
  if (TargetRegisterInfo::isVirtualRegister(RegB))
    MRI.constrainRegClass(RegB, RC);
  if (TargetRegisterInfo::isVirtualRegister(RegX))
    MRI.constrainRegClass(RegX, RC);
  if (TargetRegisterInfo::isVirtualRegister(RegY))
    MRI.constrainRegClass(RegY, RC);
  if (TargetRegisterInfo::isVirtualRegister(RegC))
    MRI.constrainRegClass(RegC, RC);

  // Create a new virtual register for the result of (X op Y) instead of
  // recycling RegB because the MachineCombiner's computation of the critical
  // path requires a new register definition rather than an existing one.
  unsigned NewVR = MRI.createVirtualRegister(RC);
  InstrIdxForVirtReg.insert(std::make_pair(NewVR, 0));

  unsigned Opcode = Root.getOpcode();
  bool KillA = OpA.isKill();
  bool KillX = OpX.isKill();
  bool KillY = OpY.isKill();

  // Create new instructions for insertion.
  MachineInstrBuilder MIB1 =
      BuildMI(*MF, Prev.getDebugLoc(), TII->get(Opcode), NewVR)
          .addReg(RegX, getKillRegState(KillX))
          .addReg(RegY, getKillRegState(KillY));
  MachineInstrBuilder MIB2 =
      BuildMI(*MF, Root.getDebugLoc(), TII->get(Opcode), RegC)
          .addReg(RegA, getKillRegState(KillA))
          .addReg(NewVR, getKillRegState(true));

  setSpecialOperandAttr(Root, Prev, *MIB1, *MIB2);

  // Record new instructions for insertion and old instructions for deletion.
  InsInstrs.push_back(MIB1);
  InsInstrs.push_back(MIB2);
  DelInstrs.push_back(&Prev);
  DelInstrs.push_back(&Root);
}

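// genAlternativeCodeSequence maps the chosen reassociation pattern back to
// the Prev instruction feeding Root (operand 1 for the *_AX_BY / *_XA_BY
// patterns, operand 2 for the *_AX_YB / *_XA_YB patterns) and delegates the
// actual rewriting to reassociateOps above.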
void TargetInstrInfo::genAlternativeCodeSequence(
    MachineInstr &Root, MachineCombinerPattern Pattern,
    SmallVectorImpl<MachineInstr *> &InsInstrs,
    SmallVectorImpl<MachineInstr *> &DelInstrs,
    DenseMap<unsigned, unsigned> &InstIdxForVirtReg) const {
  MachineRegisterInfo &MRI = Root.getMF()->getRegInfo();

  // Select the previous instruction in the sequence based on the input pattern.
  MachineInstr *Prev = nullptr;
  switch (Pattern) {
  case MachineCombinerPattern::REASSOC_AX_BY:
  case MachineCombinerPattern::REASSOC_XA_BY:
    Prev = MRI.getUniqueVRegDef(Root.getOperand(1).getReg());
    break;
  case MachineCombinerPattern::REASSOC_AX_YB:
  case MachineCombinerPattern::REASSOC_XA_YB:
    Prev = MRI.getUniqueVRegDef(Root.getOperand(2).getReg());
    break;
  default:
    break;
  }

  assert(Prev && "Unknown pattern for machine combiner");

  reassociateOps(Root, *Prev, Pattern, InsInstrs, DelInstrs, InstIdxForVirtReg);
}

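// Roughly speaking, the generic rematerialization test below accepts an
// instruction only if it defines a single virtual register (possibly through
// several defs of that one register), has no stores, unmodeled side effects,
// or inline asm, loads only from invariant memory, uses physical registers
// only when they are constant, and has no virtual register uses.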
bool TargetInstrInfo::isReallyTriviallyReMaterializableGeneric(
    const MachineInstr &MI, AliasAnalysis *AA) const {
  const MachineFunction &MF = *MI.getMF();
  const MachineRegisterInfo &MRI = MF.getRegInfo();

  // Remat clients assume operand 0 is the defined register.
  if (!MI.getNumOperands() || !MI.getOperand(0).isReg())
    return false;
  unsigned DefReg = MI.getOperand(0).getReg();

  // A sub-register definition can only be rematerialized if the instruction
  // doesn't read the other parts of the register. Otherwise it is really a
  // read-modify-write operation on the full virtual register which cannot be
  // moved safely.
  if (TargetRegisterInfo::isVirtualRegister(DefReg) &&
      MI.getOperand(0).getSubReg() && MI.readsVirtualRegister(DefReg))
    return false;

  // A load from a fixed stack slot can be rematerialized. This may be
  // redundant with subsequent checks, but it's target-independent,
  // simple, and a common case.
  int FrameIdx = 0;
  if (isLoadFromStackSlot(MI, FrameIdx) &&
      MF.getFrameInfo().isImmutableObjectIndex(FrameIdx))
    return true;

  // Avoid instructions obviously unsafe for remat.
  if (MI.isNotDuplicable() || MI.mayStore() || MI.hasUnmodeledSideEffects())
    return false;

  // Don't remat inline asm. We have no idea how expensive it is
  // even if it's side effect free.
  if (MI.isInlineAsm())
    return false;

  // Avoid instructions which load from potentially varying memory.
  if (MI.mayLoad() && !MI.isDereferenceableInvariantLoad(AA))
    return false;

  // If any of the registers accessed are non-constant, conservatively assume
  // the instruction is not rematerializable.
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    const MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0)
      continue;

    // Check for a well-behaved physical register.
    if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
      if (MO.isUse()) {
        // If the physreg has no defs anywhere, it's just an ambient register
        // and we can freely move its uses. Alternatively, if it's allocatable,
        // it could get allocated to something with a def during allocation.
        if (!MRI.isConstantPhysReg(Reg))
          return false;
      } else {
        // A physreg def. We can't remat it.
        return false;
      }
      continue;
    }

    // Only allow one virtual-register def. There may be multiple defs of the
    // same virtual register, though.
    if (MO.isDef() && Reg != DefReg)
      return false;

    // Don't allow any virtual-register uses. Rematting an instruction with
943139f7f9bSDimitry Andric // virtual register uses would lengthen the live ranges of the uses, which
944139f7f9bSDimitry Andric // is not necessarily a good idea, certainly not "trivial".
945139f7f9bSDimitry Andric if (MO.isUse())
946139f7f9bSDimitry Andric return false;
947139f7f9bSDimitry Andric }
948139f7f9bSDimitry Andric
949139f7f9bSDimitry Andric // Everything checked out.
950139f7f9bSDimitry Andric return true;
951139f7f9bSDimitry Andric }
952139f7f9bSDimitry Andric
9533ca95b02SDimitry Andric int TargetInstrInfo::getSPAdjust(const MachineInstr &MI) const {
9542cab237bSDimitry Andric const MachineFunction *MF = MI.getMF();
95539d628a0SDimitry Andric const TargetFrameLowering *TFI = MF->getSubtarget().getFrameLowering();
95639d628a0SDimitry Andric bool StackGrowsDown =
95739d628a0SDimitry Andric TFI->getStackGrowthDirection() == TargetFrameLowering::StackGrowsDown;
95839d628a0SDimitry Andric
959ff0cc061SDimitry Andric unsigned FrameSetupOpcode = getCallFrameSetupOpcode();
960ff0cc061SDimitry Andric unsigned FrameDestroyOpcode = getCallFrameDestroyOpcode();
96139d628a0SDimitry Andric
9627a7e6055SDimitry Andric if (!isFrameInstr(MI))
96339d628a0SDimitry Andric return 0;
96439d628a0SDimitry Andric
9657a7e6055SDimitry Andric int SPAdj = TFI->alignSPAdjust(getFrameSize(MI));
96639d628a0SDimitry Andric
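  // The raw frame size is non-negative; negate it in the cases where the
  // instruction moves the stack pointer numerically upwards (frame destroy on
  // a grows-down target, frame setup on a grows-up target), so that matching
  // setup/destroy pairs report opposite adjustments.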
9673ca95b02SDimitry Andric if ((!StackGrowsDown && MI.getOpcode() == FrameSetupOpcode) ||
9683ca95b02SDimitry Andric (StackGrowsDown && MI.getOpcode() == FrameDestroyOpcode))
96939d628a0SDimitry Andric SPAdj = -SPAdj;
97039d628a0SDimitry Andric
97139d628a0SDimitry Andric return SPAdj;
97239d628a0SDimitry Andric }
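// A typical caller (sketch, not taken from this file) keeps a running offset
// while walking a block so frame-index elimination can account for in-flight
// call frames, e.g.:
//   int SPAdj = 0;
//   for (MachineInstr &MI : MBB)
//     SPAdj += TII->getSPAdjust(MI);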
97339d628a0SDimitry Andric
974139f7f9bSDimitry Andric /// isSchedulingBoundary - Test if the given instruction should be
975139f7f9bSDimitry Andric /// considered a scheduling boundary. This primarily includes labels
976139f7f9bSDimitry Andric /// and terminators.
9773ca95b02SDimitry Andric bool TargetInstrInfo::isSchedulingBoundary(const MachineInstr &MI,
978139f7f9bSDimitry Andric const MachineBasicBlock *MBB,
979139f7f9bSDimitry Andric const MachineFunction &MF) const {
980139f7f9bSDimitry Andric // Terminators and labels can't be scheduled around.
9813ca95b02SDimitry Andric if (MI.isTerminator() || MI.isPosition())
982139f7f9bSDimitry Andric return true;
983139f7f9bSDimitry Andric
984139f7f9bSDimitry Andric // Don't attempt to schedule around any instruction that defines
985139f7f9bSDimitry Andric // a stack-oriented pointer, as it's unlikely to be profitable. This
986139f7f9bSDimitry Andric // saves compile time, because it doesn't require every single
987139f7f9bSDimitry Andric // stack slot reference to depend on the instruction that does the
988139f7f9bSDimitry Andric // modification.
98939d628a0SDimitry Andric const TargetLowering &TLI = *MF.getSubtarget().getTargetLowering();
99039d628a0SDimitry Andric const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
9913ca95b02SDimitry Andric return MI.modifiesRegister(TLI.getStackPointerRegisterToSaveRestore(), TRI);
992139f7f9bSDimitry Andric }
993139f7f9bSDimitry Andric
994139f7f9bSDimitry Andric // Provide a global flag for disabling the PreRA hazard recognizer that targets
995139f7f9bSDimitry Andric // may choose to honor.
996139f7f9bSDimitry Andric bool TargetInstrInfo::usePreRAHazardRecognizer() const {
997139f7f9bSDimitry Andric return !DisableHazardRecognizer;
998139f7f9bSDimitry Andric }
999139f7f9bSDimitry Andric
1000139f7f9bSDimitry Andric // Default implementation of CreateTargetHazardRecognizer.
1001139f7f9bSDimitry Andric ScheduleHazardRecognizer *TargetInstrInfo::
100291bc56edSDimitry Andric CreateTargetHazardRecognizer(const TargetSubtargetInfo *STI,
1003139f7f9bSDimitry Andric const ScheduleDAG *DAG) const {
1004139f7f9bSDimitry Andric // Dummy hazard recognizer allows all instructions to issue.
1005139f7f9bSDimitry Andric return new ScheduleHazardRecognizer();
1006139f7f9bSDimitry Andric }
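// A target that models structural hazards would typically override this hook;
// a minimal sketch, assuming the subtarget exposes instruction itineraries:
//   return new ScoreboardHazardRecognizer(STI->getInstrItineraryData(), DAG,
//                                         "pre-RA-sched");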
1007139f7f9bSDimitry Andric
1008139f7f9bSDimitry Andric // Default implementation of CreateTargetMIHazardRecognizer.
1009139f7f9bSDimitry Andric ScheduleHazardRecognizer *TargetInstrInfo::
1010139f7f9bSDimitry Andric CreateTargetMIHazardRecognizer(const InstrItineraryData *II,
1011139f7f9bSDimitry Andric const ScheduleDAG *DAG) const {
1012139f7f9bSDimitry Andric return (ScheduleHazardRecognizer *)
1013139f7f9bSDimitry Andric new ScoreboardHazardRecognizer(II, DAG, "misched");
1014139f7f9bSDimitry Andric }
1015139f7f9bSDimitry Andric
1016139f7f9bSDimitry Andric // Default implementation of CreateTargetPostRAHazardRecognizer.
1017139f7f9bSDimitry Andric ScheduleHazardRecognizer *TargetInstrInfo::
1018139f7f9bSDimitry Andric CreateTargetPostRAHazardRecognizer(const InstrItineraryData *II,
1019139f7f9bSDimitry Andric const ScheduleDAG *DAG) const {
1020139f7f9bSDimitry Andric return (ScheduleHazardRecognizer *)
1021139f7f9bSDimitry Andric new ScoreboardHazardRecognizer(II, DAG, "post-RA-sched");
1022139f7f9bSDimitry Andric }
1023139f7f9bSDimitry Andric
1024139f7f9bSDimitry Andric //===----------------------------------------------------------------------===//
1025139f7f9bSDimitry Andric // SelectionDAG latency interface.
1026139f7f9bSDimitry Andric //===----------------------------------------------------------------------===//
1027139f7f9bSDimitry Andric
1028139f7f9bSDimitry Andric int
1029139f7f9bSDimitry Andric TargetInstrInfo::getOperandLatency(const InstrItineraryData *ItinData,
1030139f7f9bSDimitry Andric SDNode *DefNode, unsigned DefIdx,
1031139f7f9bSDimitry Andric SDNode *UseNode, unsigned UseIdx) const {
1032139f7f9bSDimitry Andric if (!ItinData || ItinData->isEmpty())
1033139f7f9bSDimitry Andric return -1;
1034139f7f9bSDimitry Andric
1035139f7f9bSDimitry Andric if (!DefNode->isMachineOpcode())
1036139f7f9bSDimitry Andric return -1;
1037139f7f9bSDimitry Andric
1038139f7f9bSDimitry Andric unsigned DefClass = get(DefNode->getMachineOpcode()).getSchedClass();
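  // If the use is not a machine node, only the def side can be looked up in
  // the itinerary.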
1039139f7f9bSDimitry Andric if (!UseNode->isMachineOpcode())
1040139f7f9bSDimitry Andric return ItinData->getOperandCycle(DefClass, DefIdx);
1041139f7f9bSDimitry Andric unsigned UseClass = get(UseNode->getMachineOpcode()).getSchedClass();
1042139f7f9bSDimitry Andric return ItinData->getOperandLatency(DefClass, DefIdx, UseClass, UseIdx);
1043139f7f9bSDimitry Andric }
1044139f7f9bSDimitry Andric
1045139f7f9bSDimitry Andric int TargetInstrInfo::getInstrLatency(const InstrItineraryData *ItinData,
1046139f7f9bSDimitry Andric SDNode *N) const {
1047139f7f9bSDimitry Andric if (!ItinData || ItinData->isEmpty())
1048139f7f9bSDimitry Andric return 1;
1049139f7f9bSDimitry Andric
1050139f7f9bSDimitry Andric if (!N->isMachineOpcode())
1051139f7f9bSDimitry Andric return 1;
1052139f7f9bSDimitry Andric
1053139f7f9bSDimitry Andric return ItinData->getStageLatency(get(N->getMachineOpcode()).getSchedClass());
1054139f7f9bSDimitry Andric }
1055139f7f9bSDimitry Andric
1056139f7f9bSDimitry Andric //===----------------------------------------------------------------------===//
1057139f7f9bSDimitry Andric // MachineInstr latency interface.
1058139f7f9bSDimitry Andric //===----------------------------------------------------------------------===//
1059139f7f9bSDimitry Andric
10603ca95b02SDimitry Andric unsigned TargetInstrInfo::getNumMicroOps(const InstrItineraryData *ItinData,
10613ca95b02SDimitry Andric const MachineInstr &MI) const {
1062139f7f9bSDimitry Andric if (!ItinData || ItinData->isEmpty())
1063139f7f9bSDimitry Andric return 1;
1064139f7f9bSDimitry Andric
10653ca95b02SDimitry Andric unsigned Class = MI.getDesc().getSchedClass();
1066139f7f9bSDimitry Andric int UOps = ItinData->Itineraries[Class].NumMicroOps;
1067139f7f9bSDimitry Andric if (UOps >= 0)
1068139f7f9bSDimitry Andric return UOps;
1069139f7f9bSDimitry Andric
1070139f7f9bSDimitry Andric // The # of u-ops is dynamically determined. The specific target should
1071139f7f9bSDimitry Andric // override this function to return the right number.
1072139f7f9bSDimitry Andric return 1;
1073139f7f9bSDimitry Andric }
1074139f7f9bSDimitry Andric
1075139f7f9bSDimitry Andric /// Return the default expected latency for a def based on its opcode.
107639d628a0SDimitry Andric unsigned TargetInstrInfo::defaultDefLatency(const MCSchedModel &SchedModel,
10773ca95b02SDimitry Andric const MachineInstr &DefMI) const {
10783ca95b02SDimitry Andric if (DefMI.isTransient())
1079139f7f9bSDimitry Andric return 0;
10803ca95b02SDimitry Andric if (DefMI.mayLoad())
108139d628a0SDimitry Andric return SchedModel.LoadLatency;
10823ca95b02SDimitry Andric if (isHighLatencyDef(DefMI.getOpcode()))
108339d628a0SDimitry Andric return SchedModel.HighLatency;
1084139f7f9bSDimitry Andric return 1;
1085139f7f9bSDimitry Andric }
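// For example, on a scheduling model with LoadLatency == 4, a plain load
// reports 4 cycles here, while a transient instruction such as COPY reports 0.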
1086139f7f9bSDimitry Andric
10873ca95b02SDimitry Andric unsigned TargetInstrInfo::getPredicationCost(const MachineInstr &) const {
1088f785676fSDimitry Andric return 0;
1089f785676fSDimitry Andric }
1090f785676fSDimitry Andric
10913ca95b02SDimitry Andric unsigned TargetInstrInfo::getInstrLatency(const InstrItineraryData *ItinData,
10923ca95b02SDimitry Andric const MachineInstr &MI,
1093139f7f9bSDimitry Andric unsigned *PredCost) const {
1094139f7f9bSDimitry Andric // Default to one cycle for no itinerary. However, an "empty" itinerary may
1095139f7f9bSDimitry Andric // still have a MinLatency property, which getStageLatency checks.
1096139f7f9bSDimitry Andric if (!ItinData)
10973ca95b02SDimitry Andric return MI.mayLoad() ? 2 : 1;
1098139f7f9bSDimitry Andric
10993ca95b02SDimitry Andric return ItinData->getStageLatency(MI.getDesc().getSchedClass());
1100139f7f9bSDimitry Andric }
1101139f7f9bSDimitry Andric
11028f0fd8f6SDimitry Andric bool TargetInstrInfo::hasLowDefLatency(const TargetSchedModel &SchedModel,
11033ca95b02SDimitry Andric const MachineInstr &DefMI,
1104139f7f9bSDimitry Andric unsigned DefIdx) const {
11058f0fd8f6SDimitry Andric const InstrItineraryData *ItinData = SchedModel.getInstrItineraries();
1106139f7f9bSDimitry Andric if (!ItinData || ItinData->isEmpty())
1107139f7f9bSDimitry Andric return false;
1108139f7f9bSDimitry Andric
11093ca95b02SDimitry Andric unsigned DefClass = DefMI.getDesc().getSchedClass();
1110139f7f9bSDimitry Andric int DefCycle = ItinData->getOperandCycle(DefClass, DefIdx);
1111139f7f9bSDimitry Andric return (DefCycle != -1 && DefCycle <= 1);
1112139f7f9bSDimitry Andric }
1113139f7f9bSDimitry Andric
1114139f7f9bSDimitry Andric /// Both DefMI and UseMI must be valid. By default, call directly to the
1115139f7f9bSDimitry Andric /// itinerary. This may be overridden by the target.
11163ca95b02SDimitry Andric int TargetInstrInfo::getOperandLatency(const InstrItineraryData *ItinData,
11173ca95b02SDimitry Andric const MachineInstr &DefMI,
11183ca95b02SDimitry Andric unsigned DefIdx,
11193ca95b02SDimitry Andric const MachineInstr &UseMI,
11203ca95b02SDimitry Andric unsigned UseIdx) const {
11213ca95b02SDimitry Andric unsigned DefClass = DefMI.getDesc().getSchedClass();
11223ca95b02SDimitry Andric unsigned UseClass = UseMI.getDesc().getSchedClass();
1123139f7f9bSDimitry Andric return ItinData->getOperandLatency(DefClass, DefIdx, UseClass, UseIdx);
1124139f7f9bSDimitry Andric }
1125139f7f9bSDimitry Andric
1126139f7f9bSDimitry Andric /// If we can determine the operand latency from the def only, without itinerary
1127139f7f9bSDimitry Andric /// lookup, do so. Otherwise return -1.
1128139f7f9bSDimitry Andric int TargetInstrInfo::computeDefOperandLatency(
11293ca95b02SDimitry Andric const InstrItineraryData *ItinData, const MachineInstr &DefMI) const {
1130139f7f9bSDimitry Andric
1131139f7f9bSDimitry Andric // Let the target hook getInstrLatency handle missing itineraries.
1132139f7f9bSDimitry Andric if (!ItinData)
1133139f7f9bSDimitry Andric return getInstrLatency(ItinData, DefMI);
1134139f7f9bSDimitry Andric
1135139f7f9bSDimitry Andric if (ItinData->isEmpty())
1136139f7f9bSDimitry Andric return defaultDefLatency(ItinData->SchedModel, DefMI);
1137139f7f9bSDimitry Andric
1138139f7f9bSDimitry Andric // ...operand lookup required
1139139f7f9bSDimitry Andric return -1;
1140139f7f9bSDimitry Andric }
1141139f7f9bSDimitry Andric
114239d628a0SDimitry Andric bool TargetInstrInfo::getRegSequenceInputs(
114339d628a0SDimitry Andric const MachineInstr &MI, unsigned DefIdx,
114439d628a0SDimitry Andric SmallVectorImpl<RegSubRegPairAndIdx> &InputRegs) const {
114539d628a0SDimitry Andric assert((MI.isRegSequence() ||
114639d628a0SDimitry Andric MI.isRegSequenceLike()) && "Instruction does not have the proper type");
114739d628a0SDimitry Andric
114839d628a0SDimitry Andric if (!MI.isRegSequence())
114939d628a0SDimitry Andric return getRegSequenceLikeInputs(MI, DefIdx, InputRegs);
115039d628a0SDimitry Andric
115139d628a0SDimitry Andric // We are looking at:
115239d628a0SDimitry Andric // Def = REG_SEQUENCE v0, sub0, v1, sub1, ...
115339d628a0SDimitry Andric assert(DefIdx == 0 && "REG_SEQUENCE only has one def");
115439d628a0SDimitry Andric for (unsigned OpIdx = 1, EndOpIdx = MI.getNumOperands(); OpIdx != EndOpIdx;
115539d628a0SDimitry Andric OpIdx += 2) {
115639d628a0SDimitry Andric const MachineOperand &MOReg = MI.getOperand(OpIdx);
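    // Undef inputs carry no value to track; skip them.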
11576ccc06f6SDimitry Andric if (MOReg.isUndef())
11586ccc06f6SDimitry Andric continue;
115939d628a0SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(OpIdx + 1);
116039d628a0SDimitry Andric assert(MOSubIdx.isImm() &&
116139d628a0SDimitry Andric "One of the subindices of the reg_sequence is not an immediate");
116239d628a0SDimitry Andric // Record Reg:SubReg, SubIdx.
116339d628a0SDimitry Andric InputRegs.push_back(RegSubRegPairAndIdx(MOReg.getReg(), MOReg.getSubReg(),
116439d628a0SDimitry Andric (unsigned)MOSubIdx.getImm()));
116539d628a0SDimitry Andric }
116639d628a0SDimitry Andric return true;
116739d628a0SDimitry Andric }
116839d628a0SDimitry Andric
116939d628a0SDimitry Andric bool TargetInstrInfo::getExtractSubregInputs(
117039d628a0SDimitry Andric const MachineInstr &MI, unsigned DefIdx,
117139d628a0SDimitry Andric RegSubRegPairAndIdx &InputReg) const {
117239d628a0SDimitry Andric assert((MI.isExtractSubreg() ||
117339d628a0SDimitry Andric MI.isExtractSubregLike()) && "Instruction does not have the proper type");
117439d628a0SDimitry Andric
117539d628a0SDimitry Andric if (!MI.isExtractSubreg())
117639d628a0SDimitry Andric return getExtractSubregLikeInputs(MI, DefIdx, InputReg);
117739d628a0SDimitry Andric
117839d628a0SDimitry Andric // We are looking at:
117939d628a0SDimitry Andric // Def = EXTRACT_SUBREG v0.sub1, sub0.
118039d628a0SDimitry Andric assert(DefIdx == 0 && "EXTRACT_SUBREG only has one def");
118139d628a0SDimitry Andric const MachineOperand &MOReg = MI.getOperand(1);
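  // An undef source means there is no input value to report.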
11826ccc06f6SDimitry Andric if (MOReg.isUndef())
11836ccc06f6SDimitry Andric return false;
118439d628a0SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(2);
118539d628a0SDimitry Andric assert(MOSubIdx.isImm() &&
118639d628a0SDimitry Andric "The subindex of the extract_subreg is not an immediate");
118739d628a0SDimitry Andric
118839d628a0SDimitry Andric InputReg.Reg = MOReg.getReg();
118939d628a0SDimitry Andric InputReg.SubReg = MOReg.getSubReg();
119039d628a0SDimitry Andric InputReg.SubIdx = (unsigned)MOSubIdx.getImm();
119139d628a0SDimitry Andric return true;
119239d628a0SDimitry Andric }
119339d628a0SDimitry Andric
119439d628a0SDimitry Andric bool TargetInstrInfo::getInsertSubregInputs(
119539d628a0SDimitry Andric const MachineInstr &MI, unsigned DefIdx,
119639d628a0SDimitry Andric RegSubRegPair &BaseReg, RegSubRegPairAndIdx &InsertedReg) const {
119739d628a0SDimitry Andric assert((MI.isInsertSubreg() ||
119839d628a0SDimitry Andric MI.isInsertSubregLike()) && "Instruction does not have the proper type");
119939d628a0SDimitry Andric
120039d628a0SDimitry Andric if (!MI.isInsertSubreg())
120139d628a0SDimitry Andric return getInsertSubregLikeInputs(MI, DefIdx, BaseReg, InsertedReg);
120239d628a0SDimitry Andric
120339d628a0SDimitry Andric // We are looking at:
120439d628a0SDimitry Andric // Def = INSERT_SUBREG v0, v1, sub0.
120539d628a0SDimitry Andric assert(DefIdx == 0 && "INSERT_SUBREG only has one def");
120639d628a0SDimitry Andric const MachineOperand &MOBaseReg = MI.getOperand(1);
120739d628a0SDimitry Andric const MachineOperand &MOInsertedReg = MI.getOperand(2);
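  // Bail out if the inserted value is undef; there is nothing to report for it.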
12086ccc06f6SDimitry Andric if (MOInsertedReg.isUndef())
12096ccc06f6SDimitry Andric return false;
121039d628a0SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(3);
121139d628a0SDimitry Andric assert(MOSubIdx.isImm() &&
121239d628a0SDimitry Andric "The subindex of the insert_subreg is not an immediate");
121339d628a0SDimitry Andric BaseReg.Reg = MOBaseReg.getReg();
121439d628a0SDimitry Andric BaseReg.SubReg = MOBaseReg.getSubReg();
121539d628a0SDimitry Andric
121639d628a0SDimitry Andric InsertedReg.Reg = MOInsertedReg.getReg();
121739d628a0SDimitry Andric InsertedReg.SubReg = MOInsertedReg.getSubReg();
121839d628a0SDimitry Andric InsertedReg.SubIdx = (unsigned)MOSubIdx.getImm();
121939d628a0SDimitry Andric return true;
122039d628a0SDimitry Andric }
1221