//===-- TargetInstrInfo.cpp - Target Instruction Information --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//
120b57cec5SDimitry Andric
130b57cec5SDimitry Andric #include "llvm/CodeGen/TargetInstrInfo.h"
145ffd83dbSDimitry Andric #include "llvm/ADT/StringExtras.h"
150b57cec5SDimitry Andric #include "llvm/CodeGen/MachineFrameInfo.h"
160b57cec5SDimitry Andric #include "llvm/CodeGen/MachineInstrBuilder.h"
170b57cec5SDimitry Andric #include "llvm/CodeGen/MachineMemOperand.h"
180b57cec5SDimitry Andric #include "llvm/CodeGen/MachineRegisterInfo.h"
19480093f4SDimitry Andric #include "llvm/CodeGen/MachineScheduler.h"
200b57cec5SDimitry Andric #include "llvm/CodeGen/PseudoSourceValue.h"
210b57cec5SDimitry Andric #include "llvm/CodeGen/ScoreboardHazardRecognizer.h"
220b57cec5SDimitry Andric #include "llvm/CodeGen/StackMaps.h"
230b57cec5SDimitry Andric #include "llvm/CodeGen/TargetFrameLowering.h"
240b57cec5SDimitry Andric #include "llvm/CodeGen/TargetLowering.h"
250b57cec5SDimitry Andric #include "llvm/CodeGen/TargetRegisterInfo.h"
260b57cec5SDimitry Andric #include "llvm/CodeGen/TargetSchedule.h"
270b57cec5SDimitry Andric #include "llvm/IR/DataLayout.h"
288bcb0991SDimitry Andric #include "llvm/IR/DebugInfoMetadata.h"
290b57cec5SDimitry Andric #include "llvm/MC/MCAsmInfo.h"
300b57cec5SDimitry Andric #include "llvm/MC/MCInstrItineraries.h"
310b57cec5SDimitry Andric #include "llvm/Support/CommandLine.h"
320b57cec5SDimitry Andric #include "llvm/Support/ErrorHandling.h"
330b57cec5SDimitry Andric #include "llvm/Support/raw_ostream.h"
340b57cec5SDimitry Andric #include "llvm/Target/TargetMachine.h"
350b57cec5SDimitry Andric #include <cctype>
360b57cec5SDimitry Andric
370b57cec5SDimitry Andric using namespace llvm;
380b57cec5SDimitry Andric
// Command-line escape hatch: "-disable-sched-hazard" turns off hazard
// detection during pre-RA scheduling. Hidden flag, off by default; presumably
// consulted by the hazard-recognizer factory hooks in this file — the use
// sites are outside this chunk.
static cl::opt<bool> DisableHazardRecognizer(
  "disable-sched-hazard", cl::Hidden, cl::init(false),
  cl::desc("Disable hazard detection during preRA scheduling"));
420b57cec5SDimitry Andric
~TargetInstrInfo()430b57cec5SDimitry Andric TargetInstrInfo::~TargetInstrInfo() {
440b57cec5SDimitry Andric }
450b57cec5SDimitry Andric
460b57cec5SDimitry Andric const TargetRegisterClass*
getRegClass(const MCInstrDesc & MCID,unsigned OpNum,const TargetRegisterInfo * TRI,const MachineFunction & MF) const470b57cec5SDimitry Andric TargetInstrInfo::getRegClass(const MCInstrDesc &MCID, unsigned OpNum,
480b57cec5SDimitry Andric const TargetRegisterInfo *TRI,
490b57cec5SDimitry Andric const MachineFunction &MF) const {
500b57cec5SDimitry Andric if (OpNum >= MCID.getNumOperands())
510b57cec5SDimitry Andric return nullptr;
520b57cec5SDimitry Andric
530b57cec5SDimitry Andric short RegClass = MCID.OpInfo[OpNum].RegClass;
540b57cec5SDimitry Andric if (MCID.OpInfo[OpNum].isLookupPtrRegClass())
550b57cec5SDimitry Andric return TRI->getPointerRegClass(MF, RegClass);
560b57cec5SDimitry Andric
570b57cec5SDimitry Andric // Instructions like INSERT_SUBREG do not have fixed register classes.
580b57cec5SDimitry Andric if (RegClass < 0)
590b57cec5SDimitry Andric return nullptr;
600b57cec5SDimitry Andric
610b57cec5SDimitry Andric // Otherwise just look it up normally.
620b57cec5SDimitry Andric return TRI->getRegClass(RegClass);
630b57cec5SDimitry Andric }
640b57cec5SDimitry Andric
/// insertNoop - Insert a noop into the instruction stream at the specified
/// point.
///
/// Default implementation aborts: any target that needs noop insertion
/// (e.g. for padding or patchable call sites) must override this hook.
void TargetInstrInfo::insertNoop(MachineBasicBlock &MBB,
                                 MachineBasicBlock::iterator MI) const {
  llvm_unreachable("Target didn't implement insertNoop!");
}
710b57cec5SDimitry Andric
72af732203SDimitry Andric /// insertNoops - Insert noops into the instruction stream at the specified
73af732203SDimitry Andric /// point.
insertNoops(MachineBasicBlock & MBB,MachineBasicBlock::iterator MI,unsigned Quantity) const74af732203SDimitry Andric void TargetInstrInfo::insertNoops(MachineBasicBlock &MBB,
75af732203SDimitry Andric MachineBasicBlock::iterator MI,
76af732203SDimitry Andric unsigned Quantity) const {
77af732203SDimitry Andric for (unsigned i = 0; i < Quantity; ++i)
78af732203SDimitry Andric insertNoop(MBB, MI);
79af732203SDimitry Andric }
80af732203SDimitry Andric
isAsmComment(const char * Str,const MCAsmInfo & MAI)810b57cec5SDimitry Andric static bool isAsmComment(const char *Str, const MCAsmInfo &MAI) {
820b57cec5SDimitry Andric return strncmp(Str, MAI.getCommentString().data(),
830b57cec5SDimitry Andric MAI.getCommentString().size()) == 0;
840b57cec5SDimitry Andric }
850b57cec5SDimitry Andric
/// Measure the specified inline asm to determine an approximation of its
/// length.
/// Comments (which run till the next SeparatorString or newline) do not
/// count as an instruction.
/// Any other non-whitespace text is considered an instruction, with
/// multiple instructions separated by SeparatorString or newlines.
/// Variable-length instructions are not handled here; this function
/// may be overloaded in the target code to do that.
/// We implement a special case of the .space directive which takes only a
/// single integer argument in base 10 that is the size in bytes. This is a
/// restricted form of the GAS directive in that we only interpret
/// simple--i.e. not a logical or arithmetic expression--size values without
/// the optional fill value. This is primarily used for creating arbitrary
/// sized inline asm blocks for testing purposes.
unsigned TargetInstrInfo::getInlineAsmLength(
  const char *Str,
  const MCAsmInfo &MAI, const TargetSubtargetInfo *STI) const {
  // Count the number of instructions in the asm.
  bool AtInsnStart = true;
  unsigned Length = 0;
  const unsigned MaxInstLength = MAI.getMaxInstLength(STI);
  // Single pass over the string; AtInsnStart tracks whether the next
  // non-whitespace character begins a new statement.
  for (; *Str; ++Str) {
    // A newline or the target's separator string starts a new statement.
    if (*Str == '\n' || strncmp(Str, MAI.getSeparatorString(),
                                strlen(MAI.getSeparatorString())) == 0) {
      AtInsnStart = true;
    } else if (isAsmComment(Str, MAI)) {
      // Stop counting as an instruction after a comment until the next
      // separator.
      AtInsnStart = false;
    }

    if (AtInsnStart && !isSpace(static_cast<unsigned char>(*Str))) {
      // Conservatively charge the target's maximum instruction length,
      // unless this is the restricted ".space N" form handled below.
      unsigned AddLength = MaxInstLength;
      if (strncmp(Str, ".space", 6) == 0) {
        char *EStr;
        int SpaceSize;
        SpaceSize = strtol(Str + 6, &EStr, 10);
        // Clamp negative sizes to zero.
        SpaceSize = SpaceSize < 0 ? 0 : SpaceSize;
        // Skip trailing horizontal whitespace after the number.
        while (*EStr != '\n' && isSpace(static_cast<unsigned char>(*EStr)))
          ++EStr;
        // Only accept the size if nothing but end-of-line/end-of-string/a
        // comment follows — i.e. the directive had exactly one argument.
        if (*EStr == '\0' || *EStr == '\n' ||
            isAsmComment(EStr, MAI)) // Successfully parsed .space argument
          AddLength = SpaceSize;
      }
      Length += AddLength;
      // The rest of this statement has already been accounted for.
      AtInsnStart = false;
    }
  }

  return Length;
}
1370b57cec5SDimitry Andric
/// ReplaceTailWithBranchTo - Delete the instruction OldInst and everything
/// after it, replacing it with an unconditional branch to NewDest.
void
TargetInstrInfo::ReplaceTailWithBranchTo(MachineBasicBlock::iterator Tail,
                                         MachineBasicBlock *NewDest) const {
  MachineBasicBlock *MBB = Tail->getParent();

  // Remove all the old successors of MBB from the CFG.
  while (!MBB->succ_empty())
    MBB->removeSuccessor(MBB->succ_begin());

  // Save off the debug loc before erasing the instruction.
  DebugLoc DL = Tail->getDebugLoc();

  // Update call site info and remove all the dead instructions
  // from the end of MBB.
  while (Tail != MBB->end()) {
    auto MI = Tail++;  // advance first: erase() invalidates the iterator
    if (MI->shouldUpdateCallSiteInfo())
      MBB->getParent()->eraseCallSiteInfo(&*MI);
    MBB->erase(MI);
  }

  // If NewDest isn't the layout successor of MBB, insert an explicit
  // unconditional branch to it; otherwise fall through.
  if (++MachineFunction::iterator(MBB) != MachineFunction::iterator(NewDest))
    insertBranch(*MBB, NewDest, nullptr, SmallVector<MachineOperand, 0>(), DL);
  MBB->addSuccessor(NewDest);
}
1660b57cec5SDimitry Andric
/// Default implementation of operand commutation: swap the register operands
/// at Idx1 and Idx2 (registers, subregisters, and the kill/undef/
/// internal-read/renamable flags travel with their register). If NewMI is
/// true the swap is performed on a clone and the original is untouched;
/// otherwise MI is mutated in place. Returns the commuted instruction, or
/// nullptr if this generic code cannot handle the instruction.
MachineInstr *TargetInstrInfo::commuteInstructionImpl(MachineInstr &MI,
                                                      bool NewMI, unsigned Idx1,
                                                      unsigned Idx2) const {
  const MCInstrDesc &MCID = MI.getDesc();
  bool HasDef = MCID.getNumDefs();
  if (HasDef && !MI.getOperand(0).isReg())
    // No idea how to commute this instruction. Target should implement its own.
    return nullptr;

  // Copies made only so the assert below can verify the caller's indices
  // without clobbering Idx1/Idx2; unused in NDEBUG builds.
  unsigned CommutableOpIdx1 = Idx1; (void)CommutableOpIdx1;
  unsigned CommutableOpIdx2 = Idx2; (void)CommutableOpIdx2;
  assert(findCommutedOpIndices(MI, CommutableOpIdx1, CommutableOpIdx2) &&
         CommutableOpIdx1 == Idx1 && CommutableOpIdx2 == Idx2 &&
         "TargetInstrInfo::CommuteInstructionImpl(): not commutable operands.");
  assert(MI.getOperand(Idx1).isReg() && MI.getOperand(Idx2).isReg() &&
         "This only knows how to commute register operands so far");

  // Snapshot everything about both operands (and the def) before mutating.
  Register Reg0 = HasDef ? MI.getOperand(0).getReg() : Register();
  Register Reg1 = MI.getOperand(Idx1).getReg();
  Register Reg2 = MI.getOperand(Idx2).getReg();
  unsigned SubReg0 = HasDef ? MI.getOperand(0).getSubReg() : 0;
  unsigned SubReg1 = MI.getOperand(Idx1).getSubReg();
  unsigned SubReg2 = MI.getOperand(Idx2).getSubReg();
  bool Reg1IsKill = MI.getOperand(Idx1).isKill();
  bool Reg2IsKill = MI.getOperand(Idx2).isKill();
  bool Reg1IsUndef = MI.getOperand(Idx1).isUndef();
  bool Reg2IsUndef = MI.getOperand(Idx2).isUndef();
  bool Reg1IsInternal = MI.getOperand(Idx1).isInternalRead();
  bool Reg2IsInternal = MI.getOperand(Idx2).isInternalRead();
  // Avoid calling isRenamable for virtual registers since we assert that
  // renamable property is only queried/set for physical registers.
  bool Reg1IsRenamable = Register::isPhysicalRegister(Reg1)
                             ? MI.getOperand(Idx1).isRenamable()
                             : false;
  bool Reg2IsRenamable = Register::isPhysicalRegister(Reg2)
                             ? MI.getOperand(Idx2).isRenamable()
                             : false;
  // If destination is tied to either of the commuted source register, then
  // it must be updated.
  if (HasDef && Reg0 == Reg1 &&
      MI.getDesc().getOperandConstraint(Idx1, MCOI::TIED_TO) == 0) {
    // The tied use moves to Idx2, so the def must follow Reg2; the kill flag
    // would now be wrong on the surviving use of Reg2.
    Reg2IsKill = false;
    Reg0 = Reg2;
    SubReg0 = SubReg2;
  } else if (HasDef && Reg0 == Reg2 &&
             MI.getDesc().getOperandConstraint(Idx2, MCOI::TIED_TO) == 0) {
    Reg1IsKill = false;
    Reg0 = Reg1;
    SubReg0 = SubReg1;
  }

  MachineInstr *CommutedMI = nullptr;
  if (NewMI) {
    // Create a new instruction.
    MachineFunction &MF = *MI.getMF();
    CommutedMI = MF.CloneMachineInstr(&MI);
  } else {
    CommutedMI = &MI;
  }

  // Write the snapshotted state back, crosswise: operand Idx2 receives
  // everything that belonged to Idx1 and vice versa.
  if (HasDef) {
    CommutedMI->getOperand(0).setReg(Reg0);
    CommutedMI->getOperand(0).setSubReg(SubReg0);
  }
  CommutedMI->getOperand(Idx2).setReg(Reg1);
  CommutedMI->getOperand(Idx1).setReg(Reg2);
  CommutedMI->getOperand(Idx2).setSubReg(SubReg1);
  CommutedMI->getOperand(Idx1).setSubReg(SubReg2);
  CommutedMI->getOperand(Idx2).setIsKill(Reg1IsKill);
  CommutedMI->getOperand(Idx1).setIsKill(Reg2IsKill);
  CommutedMI->getOperand(Idx2).setIsUndef(Reg1IsUndef);
  CommutedMI->getOperand(Idx1).setIsUndef(Reg2IsUndef);
  CommutedMI->getOperand(Idx2).setIsInternalRead(Reg1IsInternal);
  CommutedMI->getOperand(Idx1).setIsInternalRead(Reg2IsInternal);
  // Avoid calling setIsRenamable for virtual registers since we assert that
  // renamable property is only queried/set for physical registers.
  if (Register::isPhysicalRegister(Reg1))
    CommutedMI->getOperand(Idx2).setIsRenamable(Reg1IsRenamable);
  if (Register::isPhysicalRegister(Reg2))
    CommutedMI->getOperand(Idx1).setIsRenamable(Reg2IsRenamable);
  return CommutedMI;
}
2490b57cec5SDimitry Andric
commuteInstruction(MachineInstr & MI,bool NewMI,unsigned OpIdx1,unsigned OpIdx2) const2500b57cec5SDimitry Andric MachineInstr *TargetInstrInfo::commuteInstruction(MachineInstr &MI, bool NewMI,
2510b57cec5SDimitry Andric unsigned OpIdx1,
2520b57cec5SDimitry Andric unsigned OpIdx2) const {
2530b57cec5SDimitry Andric // If OpIdx1 or OpIdx2 is not specified, then this method is free to choose
2540b57cec5SDimitry Andric // any commutable operand, which is done in findCommutedOpIndices() method
2550b57cec5SDimitry Andric // called below.
2560b57cec5SDimitry Andric if ((OpIdx1 == CommuteAnyOperandIndex || OpIdx2 == CommuteAnyOperandIndex) &&
2570b57cec5SDimitry Andric !findCommutedOpIndices(MI, OpIdx1, OpIdx2)) {
2580b57cec5SDimitry Andric assert(MI.isCommutable() &&
2590b57cec5SDimitry Andric "Precondition violation: MI must be commutable.");
2600b57cec5SDimitry Andric return nullptr;
2610b57cec5SDimitry Andric }
2620b57cec5SDimitry Andric return commuteInstructionImpl(MI, NewMI, OpIdx1, OpIdx2);
2630b57cec5SDimitry Andric }
2640b57cec5SDimitry Andric
fixCommutedOpIndices(unsigned & ResultIdx1,unsigned & ResultIdx2,unsigned CommutableOpIdx1,unsigned CommutableOpIdx2)2650b57cec5SDimitry Andric bool TargetInstrInfo::fixCommutedOpIndices(unsigned &ResultIdx1,
2660b57cec5SDimitry Andric unsigned &ResultIdx2,
2670b57cec5SDimitry Andric unsigned CommutableOpIdx1,
2680b57cec5SDimitry Andric unsigned CommutableOpIdx2) {
2690b57cec5SDimitry Andric if (ResultIdx1 == CommuteAnyOperandIndex &&
2700b57cec5SDimitry Andric ResultIdx2 == CommuteAnyOperandIndex) {
2710b57cec5SDimitry Andric ResultIdx1 = CommutableOpIdx1;
2720b57cec5SDimitry Andric ResultIdx2 = CommutableOpIdx2;
2730b57cec5SDimitry Andric } else if (ResultIdx1 == CommuteAnyOperandIndex) {
2740b57cec5SDimitry Andric if (ResultIdx2 == CommutableOpIdx1)
2750b57cec5SDimitry Andric ResultIdx1 = CommutableOpIdx2;
2760b57cec5SDimitry Andric else if (ResultIdx2 == CommutableOpIdx2)
2770b57cec5SDimitry Andric ResultIdx1 = CommutableOpIdx1;
2780b57cec5SDimitry Andric else
2790b57cec5SDimitry Andric return false;
2800b57cec5SDimitry Andric } else if (ResultIdx2 == CommuteAnyOperandIndex) {
2810b57cec5SDimitry Andric if (ResultIdx1 == CommutableOpIdx1)
2820b57cec5SDimitry Andric ResultIdx2 = CommutableOpIdx2;
2830b57cec5SDimitry Andric else if (ResultIdx1 == CommutableOpIdx2)
2840b57cec5SDimitry Andric ResultIdx2 = CommutableOpIdx1;
2850b57cec5SDimitry Andric else
2860b57cec5SDimitry Andric return false;
2870b57cec5SDimitry Andric } else
2880b57cec5SDimitry Andric // Check that the result operand indices match the given commutable
2890b57cec5SDimitry Andric // operand indices.
2900b57cec5SDimitry Andric return (ResultIdx1 == CommutableOpIdx1 && ResultIdx2 == CommutableOpIdx2) ||
2910b57cec5SDimitry Andric (ResultIdx1 == CommutableOpIdx2 && ResultIdx2 == CommutableOpIdx1);
2920b57cec5SDimitry Andric
2930b57cec5SDimitry Andric return true;
2940b57cec5SDimitry Andric }
2950b57cec5SDimitry Andric
findCommutedOpIndices(const MachineInstr & MI,unsigned & SrcOpIdx1,unsigned & SrcOpIdx2) const2968bcb0991SDimitry Andric bool TargetInstrInfo::findCommutedOpIndices(const MachineInstr &MI,
2970b57cec5SDimitry Andric unsigned &SrcOpIdx1,
2980b57cec5SDimitry Andric unsigned &SrcOpIdx2) const {
2990b57cec5SDimitry Andric assert(!MI.isBundle() &&
3000b57cec5SDimitry Andric "TargetInstrInfo::findCommutedOpIndices() can't handle bundles");
3010b57cec5SDimitry Andric
3020b57cec5SDimitry Andric const MCInstrDesc &MCID = MI.getDesc();
3030b57cec5SDimitry Andric if (!MCID.isCommutable())
3040b57cec5SDimitry Andric return false;
3050b57cec5SDimitry Andric
3060b57cec5SDimitry Andric // This assumes v0 = op v1, v2 and commuting would swap v1 and v2. If this
3070b57cec5SDimitry Andric // is not true, then the target must implement this.
3080b57cec5SDimitry Andric unsigned CommutableOpIdx1 = MCID.getNumDefs();
3090b57cec5SDimitry Andric unsigned CommutableOpIdx2 = CommutableOpIdx1 + 1;
3100b57cec5SDimitry Andric if (!fixCommutedOpIndices(SrcOpIdx1, SrcOpIdx2,
3110b57cec5SDimitry Andric CommutableOpIdx1, CommutableOpIdx2))
3120b57cec5SDimitry Andric return false;
3130b57cec5SDimitry Andric
3140b57cec5SDimitry Andric if (!MI.getOperand(SrcOpIdx1).isReg() || !MI.getOperand(SrcOpIdx2).isReg())
3150b57cec5SDimitry Andric // No idea.
3160b57cec5SDimitry Andric return false;
3170b57cec5SDimitry Andric return true;
3180b57cec5SDimitry Andric }
3190b57cec5SDimitry Andric
isUnpredicatedTerminator(const MachineInstr & MI) const3200b57cec5SDimitry Andric bool TargetInstrInfo::isUnpredicatedTerminator(const MachineInstr &MI) const {
3210b57cec5SDimitry Andric if (!MI.isTerminator()) return false;
3220b57cec5SDimitry Andric
3230b57cec5SDimitry Andric // Conditional branch is a special case.
3240b57cec5SDimitry Andric if (MI.isBranch() && !MI.isBarrier())
3250b57cec5SDimitry Andric return true;
3260b57cec5SDimitry Andric if (!MI.isPredicable())
3270b57cec5SDimitry Andric return true;
3280b57cec5SDimitry Andric return !isPredicated(MI);
3290b57cec5SDimitry Andric }
3300b57cec5SDimitry Andric
PredicateInstruction(MachineInstr & MI,ArrayRef<MachineOperand> Pred) const3310b57cec5SDimitry Andric bool TargetInstrInfo::PredicateInstruction(
3320b57cec5SDimitry Andric MachineInstr &MI, ArrayRef<MachineOperand> Pred) const {
3330b57cec5SDimitry Andric bool MadeChange = false;
3340b57cec5SDimitry Andric
3350b57cec5SDimitry Andric assert(!MI.isBundle() &&
3360b57cec5SDimitry Andric "TargetInstrInfo::PredicateInstruction() can't handle bundles");
3370b57cec5SDimitry Andric
3380b57cec5SDimitry Andric const MCInstrDesc &MCID = MI.getDesc();
3390b57cec5SDimitry Andric if (!MI.isPredicable())
3400b57cec5SDimitry Andric return false;
3410b57cec5SDimitry Andric
3420b57cec5SDimitry Andric for (unsigned j = 0, i = 0, e = MI.getNumOperands(); i != e; ++i) {
3430b57cec5SDimitry Andric if (MCID.OpInfo[i].isPredicate()) {
3440b57cec5SDimitry Andric MachineOperand &MO = MI.getOperand(i);
3450b57cec5SDimitry Andric if (MO.isReg()) {
3460b57cec5SDimitry Andric MO.setReg(Pred[j].getReg());
3470b57cec5SDimitry Andric MadeChange = true;
3480b57cec5SDimitry Andric } else if (MO.isImm()) {
3490b57cec5SDimitry Andric MO.setImm(Pred[j].getImm());
3500b57cec5SDimitry Andric MadeChange = true;
3510b57cec5SDimitry Andric } else if (MO.isMBB()) {
3520b57cec5SDimitry Andric MO.setMBB(Pred[j].getMBB());
3530b57cec5SDimitry Andric MadeChange = true;
3540b57cec5SDimitry Andric }
3550b57cec5SDimitry Andric ++j;
3560b57cec5SDimitry Andric }
3570b57cec5SDimitry Andric }
3580b57cec5SDimitry Andric return MadeChange;
3590b57cec5SDimitry Andric }
3600b57cec5SDimitry Andric
hasLoadFromStackSlot(const MachineInstr & MI,SmallVectorImpl<const MachineMemOperand * > & Accesses) const3610b57cec5SDimitry Andric bool TargetInstrInfo::hasLoadFromStackSlot(
3620b57cec5SDimitry Andric const MachineInstr &MI,
3630b57cec5SDimitry Andric SmallVectorImpl<const MachineMemOperand *> &Accesses) const {
3640b57cec5SDimitry Andric size_t StartSize = Accesses.size();
3650b57cec5SDimitry Andric for (MachineInstr::mmo_iterator o = MI.memoperands_begin(),
3660b57cec5SDimitry Andric oe = MI.memoperands_end();
3670b57cec5SDimitry Andric o != oe; ++o) {
3680b57cec5SDimitry Andric if ((*o)->isLoad() &&
3690b57cec5SDimitry Andric dyn_cast_or_null<FixedStackPseudoSourceValue>((*o)->getPseudoValue()))
3700b57cec5SDimitry Andric Accesses.push_back(*o);
3710b57cec5SDimitry Andric }
3720b57cec5SDimitry Andric return Accesses.size() != StartSize;
3730b57cec5SDimitry Andric }
3740b57cec5SDimitry Andric
hasStoreToStackSlot(const MachineInstr & MI,SmallVectorImpl<const MachineMemOperand * > & Accesses) const3750b57cec5SDimitry Andric bool TargetInstrInfo::hasStoreToStackSlot(
3760b57cec5SDimitry Andric const MachineInstr &MI,
3770b57cec5SDimitry Andric SmallVectorImpl<const MachineMemOperand *> &Accesses) const {
3780b57cec5SDimitry Andric size_t StartSize = Accesses.size();
3790b57cec5SDimitry Andric for (MachineInstr::mmo_iterator o = MI.memoperands_begin(),
3800b57cec5SDimitry Andric oe = MI.memoperands_end();
3810b57cec5SDimitry Andric o != oe; ++o) {
3820b57cec5SDimitry Andric if ((*o)->isStore() &&
3830b57cec5SDimitry Andric dyn_cast_or_null<FixedStackPseudoSourceValue>((*o)->getPseudoValue()))
3840b57cec5SDimitry Andric Accesses.push_back(*o);
3850b57cec5SDimitry Andric }
3860b57cec5SDimitry Andric return Accesses.size() != StartSize;
3870b57cec5SDimitry Andric }
3880b57cec5SDimitry Andric
/// Compute the byte Size and Offset of the part of a spill slot for RC that
/// subregister index SubIdx refers to. Returns false when the subregister's
/// bit size or bit offset is not byte-aligned (no byte range exists).
bool TargetInstrInfo::getStackSlotRange(const TargetRegisterClass *RC,
                                        unsigned SubIdx, unsigned &Size,
                                        unsigned &Offset,
                                        const MachineFunction &MF) const {
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
  // No subregister index: the range is the whole spill slot.
  if (!SubIdx) {
    Size = TRI->getSpillSize(*RC);
    Offset = 0;
    return true;
  }
  unsigned BitSize = TRI->getSubRegIdxSize(SubIdx);
  // Convert bit size to byte size.
  if (BitSize % 8)
    return false;

  int BitOffset = TRI->getSubRegIdxOffset(SubIdx);
  // A negative offset means the subreg is not at a fixed position.
  if (BitOffset < 0 || BitOffset % 8)
    return false;

  Size = BitSize / 8;
  Offset = (unsigned)BitOffset / 8;

  assert(TRI->getSpillSize(*RC) >= (Offset + Size) && "bad subregister range");

  // Subreg offsets are little-endian byte offsets; on big-endian targets the
  // same bits live at the opposite end of the slot.
  if (!MF.getDataLayout().isLittleEndian()) {
    Offset = TRI->getSpillSize(*RC) - (Offset + Size);
  }
  return true;
}
4180b57cec5SDimitry Andric
/// Default rematerialization: clone Orig, retarget its definition (operand 0)
/// to DestReg:SubIdx, and insert the clone before I in MBB.
void TargetInstrInfo::reMaterialize(MachineBasicBlock &MBB,
                                    MachineBasicBlock::iterator I,
                                    Register DestReg, unsigned SubIdx,
                                    const MachineInstr &Orig,
                                    const TargetRegisterInfo &TRI) const {
  MachineInstr *MI = MBB.getParent()->CloneMachineInstr(&Orig);
  // Replace every occurrence of the original def register in the clone.
  MI->substituteRegister(MI->getOperand(0).getReg(), DestReg, SubIdx, TRI);
  MBB.insert(I, MI);
}
4280b57cec5SDimitry Andric
/// Default check for whether two instructions compute the same value:
/// structural identity, ignoring which virtual registers they define.
/// Targets with opcodes whose equivalence needs deeper analysis override
/// this; MRI is unused here but available to such overrides.
bool TargetInstrInfo::produceSameValue(const MachineInstr &MI0,
                                       const MachineInstr &MI1,
                                       const MachineRegisterInfo *MRI) const {
  return MI0.isIdenticalTo(MI1, MachineInstr::IgnoreVRegDefs);
}
4340b57cec5SDimitry Andric
/// Clone Orig (including any bundled instructions) into MBB before
/// InsertBefore and return the clone. Orig must be marked duplicable.
MachineInstr &TargetInstrInfo::duplicate(MachineBasicBlock &MBB,
    MachineBasicBlock::iterator InsertBefore, const MachineInstr &Orig) const {
  assert(!Orig.isNotDuplicable() && "Instruction cannot be duplicated");
  MachineFunction &MF = *MBB.getParent();
  // CloneMachineInstrBundle handles both single instructions and bundles.
  return MF.CloneMachineInstrBundle(MBB, InsertBefore, Orig);
}
4410b57cec5SDimitry Andric
4420b57cec5SDimitry Andric // If the COPY instruction in MI can be folded to a stack operation, return
4430b57cec5SDimitry Andric // the register class to use.
canFoldCopy(const MachineInstr & MI,unsigned FoldIdx)4440b57cec5SDimitry Andric static const TargetRegisterClass *canFoldCopy(const MachineInstr &MI,
4450b57cec5SDimitry Andric unsigned FoldIdx) {
4460b57cec5SDimitry Andric assert(MI.isCopy() && "MI must be a COPY instruction");
4470b57cec5SDimitry Andric if (MI.getNumOperands() != 2)
4480b57cec5SDimitry Andric return nullptr;
4490b57cec5SDimitry Andric assert(FoldIdx<2 && "FoldIdx refers no nonexistent operand");
4500b57cec5SDimitry Andric
4510b57cec5SDimitry Andric const MachineOperand &FoldOp = MI.getOperand(FoldIdx);
4520b57cec5SDimitry Andric const MachineOperand &LiveOp = MI.getOperand(1 - FoldIdx);
4530b57cec5SDimitry Andric
4540b57cec5SDimitry Andric if (FoldOp.getSubReg() || LiveOp.getSubReg())
4550b57cec5SDimitry Andric return nullptr;
4560b57cec5SDimitry Andric
4578bcb0991SDimitry Andric Register FoldReg = FoldOp.getReg();
4588bcb0991SDimitry Andric Register LiveReg = LiveOp.getReg();
4590b57cec5SDimitry Andric
4608bcb0991SDimitry Andric assert(Register::isVirtualRegister(FoldReg) && "Cannot fold physregs");
4610b57cec5SDimitry Andric
4620b57cec5SDimitry Andric const MachineRegisterInfo &MRI = MI.getMF()->getRegInfo();
4630b57cec5SDimitry Andric const TargetRegisterClass *RC = MRI.getRegClass(FoldReg);
4640b57cec5SDimitry Andric
4658bcb0991SDimitry Andric if (Register::isPhysicalRegister(LiveOp.getReg()))
4660b57cec5SDimitry Andric return RC->contains(LiveOp.getReg()) ? RC : nullptr;
4670b57cec5SDimitry Andric
4680b57cec5SDimitry Andric if (RC->hasSubClassEq(MRI.getRegClass(LiveReg)))
4690b57cec5SDimitry Andric return RC;
4700b57cec5SDimitry Andric
4710b57cec5SDimitry Andric // FIXME: Allow folding when register classes are memory compatible.
4720b57cec5SDimitry Andric return nullptr;
4730b57cec5SDimitry Andric }
4740b57cec5SDimitry Andric
getNop() const475*5f7ddb14SDimitry Andric MCInst TargetInstrInfo::getNop() const { llvm_unreachable("Not implemented"); }
476*5f7ddb14SDimitry Andric
/// For a stackmap-family instruction, return the half-open operand range
/// [first, second) that must NOT be folded: operands below `first` (the defs)
/// and from `second` (the variable/live operands) onward are foldable, while
/// operands in between (calling-convention arguments, meta operands) are not.
/// Aborts on any other opcode.
std::pair<unsigned, unsigned>
TargetInstrInfo::getPatchpointUnfoldableRange(const MachineInstr &MI) const {
  switch (MI.getOpcode()) {
  case TargetOpcode::STACKMAP:
    // StackMapLiveValues are foldable
    return std::make_pair(0, StackMapOpers(&MI).getVarIdx());
  case TargetOpcode::PATCHPOINT:
    // For PatchPoint, the call args are not foldable (even if reported in the
    // stackmap e.g. via anyregcc).
    return std::make_pair(0, PatchPointOpers(&MI).getVarIdx());
  case TargetOpcode::STATEPOINT:
    // For statepoints, fold deopt and gc arguments, but not call arguments.
    return std::make_pair(MI.getNumDefs(), StatepointOpers(&MI).getVarIdx());
  default:
    llvm_unreachable("unexpected stackmap opcode");
  }
}
4940b57cec5SDimitry Andric
/// Fold the operands listed in \p Ops of the stackmap-like instruction \p MI
/// (STACKMAP, PATCHPOINT, or STATEPOINT) into references to the stack slot
/// \p FrameIndex. Each folded register operand is rewritten as an
/// (IndirectMemRefOp, size, frame-index, offset) tuple. Returns the new,
/// not-yet-inserted instruction, or nullptr if the requested operands cannot
/// be folded.
static MachineInstr *foldPatchpoint(MachineFunction &MF, MachineInstr &MI,
                                    ArrayRef<unsigned> Ops, int FrameIndex,
                                    const TargetInstrInfo &TII) {
  unsigned StartIdx = 0;
  unsigned NumDefs = 0;
  // getPatchpointUnfoldableRange hits an llvm_unreachable if MI is not a
  // stackmap/patchpoint/statepoint.
  std::tie(NumDefs, StartIdx) = TII.getPatchpointUnfoldableRange(MI);

  // Index of the (at most one) def operand being folded. The sentinel value
  // MI.getNumOperands() means "no def folded".
  unsigned DefToFoldIdx = MI.getNumOperands();

  // Return nullptr if any operands requested for folding are not foldable (not
  // part of the stackmap's live values).
  for (unsigned Op : Ops) {
    if (Op < NumDefs) {
      assert(DefToFoldIdx == MI.getNumOperands() && "Folding multiple defs");
      DefToFoldIdx = Op;
    } else if (Op < StartIdx) {
      // Operand lies in the unfoldable meta/call-argument range.
      return nullptr;
    }
    // Tied operands cannot be replaced by memory references.
    if (MI.getOperand(Op).isTied())
      return nullptr;
  }

  MachineInstr *NewMI =
      MF.CreateMachineInstr(TII.get(MI.getOpcode()), MI.getDebugLoc(), true);
  MachineInstrBuilder MIB(MF, NewMI);

  // No need to fold the return value, the meta data, and function arguments;
  // copy them through, skipping the folded def (if any).
  for (unsigned i = 0; i < StartIdx; ++i)
    if (i != DefToFoldIdx)
      MIB.add(MI.getOperand(i));

  for (unsigned i = StartIdx, e = MI.getNumOperands(); i < e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    // TiedTo receives the def index this operand is tied to; it stays == e
    // when the operand is not tied.
    unsigned TiedTo = e;
    (void)MI.isRegTiedToDefOperand(i, &TiedTo);

    if (is_contained(Ops, i)) {
      assert(TiedTo == e && "Cannot fold tied operands");
      unsigned SpillSize;
      unsigned SpillOffset;
      // Compute the spill slot size and offset.
      const TargetRegisterClass *RC =
        MF.getRegInfo().getRegClass(MO.getReg());
      bool Valid =
          TII.getStackSlotRange(RC, MO.getSubReg(), SpillSize, SpillOffset, MF);
      if (!Valid)
        report_fatal_error("cannot spill patchpoint subregister operand");
      // Emit the indirect-memory reference tuple in place of the register.
      MIB.addImm(StackMaps::IndirectMemRefOp);
      MIB.addImm(SpillSize);
      MIB.addFrameIndex(FrameIndex);
      MIB.addImm(SpillOffset);
    } else {
      MIB.add(MO);
      if (TiedTo < e) {
        assert(TiedTo < NumDefs && "Bad tied operand");
        // If the folded def preceded the tied def, removing it shifted the
        // remaining def indices down by one.
        if (TiedTo > DefToFoldIdx)
          --TiedTo;
        NewMI->tieOperands(TiedTo, NewMI->getNumOperands() - 1);
      }
    }
  }
  return NewMI;
}
5590b57cec5SDimitry Andric
/// Try to fold the operands in \p Ops of \p MI into loads/stores on the
/// frame index \p FI. Handles stackmap-like instructions generically,
/// delegates everything else to the target's foldMemoryOperandImpl(), and
/// finally falls back to turning a plain COPY into a spill or reload.
/// Returns the new (inserted) instruction, or nullptr on failure.
MachineInstr *TargetInstrInfo::foldMemoryOperand(MachineInstr &MI,
                                                 ArrayRef<unsigned> Ops, int FI,
                                                 LiveIntervals *LIS,
                                                 VirtRegMap *VRM) const {
  // A folded def becomes a store to the slot; a folded use becomes a load.
  auto Flags = MachineMemOperand::MONone;
  for (unsigned OpIdx : Ops)
    Flags |= MI.getOperand(OpIdx).isDef() ? MachineMemOperand::MOStore
                                          : MachineMemOperand::MOLoad;

  MachineBasicBlock *MBB = MI.getParent();
  assert(MBB && "foldMemoryOperand needs an inserted instruction");
  MachineFunction &MF = *MBB->getParent();

  // If we're not folding a load into a subreg, the size of the load is the
  // size of the spill slot. But if we are, we need to figure out what the
  // actual load size is.
  int64_t MemSize = 0;
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();

  if (Flags & MachineMemOperand::MOStore) {
    MemSize = MFI.getObjectSize(FI);
  } else {
    for (unsigned OpIdx : Ops) {
      int64_t OpSize = MFI.getObjectSize(FI);

      if (auto SubReg = MI.getOperand(OpIdx).getSubReg()) {
        unsigned SubRegSize = TRI->getSubRegIdxSize(SubReg);
        // Only use the subreg size when it is a whole number of bytes.
        if (SubRegSize > 0 && !(SubRegSize % 8))
          OpSize = SubRegSize / 8;
      }

      MemSize = std::max(MemSize, OpSize);
    }
  }

  assert(MemSize && "Did not expect a zero-sized stack slot");

  MachineInstr *NewMI = nullptr;

  if (MI.getOpcode() == TargetOpcode::STACKMAP ||
      MI.getOpcode() == TargetOpcode::PATCHPOINT ||
      MI.getOpcode() == TargetOpcode::STATEPOINT) {
    // Fold stackmap/patchpoint.
    NewMI = foldPatchpoint(MF, MI, Ops, FI, *this);
    if (NewMI)
      MBB->insert(MI, NewMI);
  } else {
    // Ask the target to do the actual folding.
    NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, FI, LIS, VRM);
  }

  if (NewMI) {
    NewMI->setMemRefs(MF, MI.memoperands());
    // Add a memory operand, foldMemoryOperandImpl doesn't do that.
    assert((!(Flags & MachineMemOperand::MOStore) ||
            NewMI->mayStore()) &&
           "Folded a def to a non-store!");
    assert((!(Flags & MachineMemOperand::MOLoad) ||
            NewMI->mayLoad()) &&
           "Folded a use to a non-load!");
    assert(MFI.getObjectOffset(FI) != -1);
    MachineMemOperand *MMO =
        MF.getMachineMemOperand(MachinePointerInfo::getFixedStack(MF, FI),
                                Flags, MemSize, MFI.getObjectAlign(FI));
    NewMI->addMemOperand(MF, MMO);

    // The pass "x86 speculative load hardening" always attaches symbols to
    // call instructions. We need to copy them from the old instruction.
    NewMI->cloneInstrSymbols(MF, MI);

    return NewMI;
  }

  // Straight COPY may fold as load/store.
  if (!MI.isCopy() || Ops.size() != 1)
    return nullptr;

  const TargetRegisterClass *RC = canFoldCopy(MI, Ops[0]);
  if (!RC)
    return nullptr;

  // The other COPY operand: source when storing, destination when loading.
  const MachineOperand &MO = MI.getOperand(1 - Ops[0]);
  MachineBasicBlock::iterator Pos = MI;

  if (Flags == MachineMemOperand::MOStore)
    storeRegToStackSlot(*MBB, Pos, MO.getReg(), MO.isKill(), FI, RC, TRI);
  else
    loadRegFromStackSlot(*MBB, Pos, MO.getReg(), FI, RC, TRI);
  // The spill/reload was inserted immediately before Pos; return it.
  return &*--Pos;
}
6510b57cec5SDimitry Andric
foldMemoryOperand(MachineInstr & MI,ArrayRef<unsigned> Ops,MachineInstr & LoadMI,LiveIntervals * LIS) const6520b57cec5SDimitry Andric MachineInstr *TargetInstrInfo::foldMemoryOperand(MachineInstr &MI,
6530b57cec5SDimitry Andric ArrayRef<unsigned> Ops,
6540b57cec5SDimitry Andric MachineInstr &LoadMI,
6550b57cec5SDimitry Andric LiveIntervals *LIS) const {
6560b57cec5SDimitry Andric assert(LoadMI.canFoldAsLoad() && "LoadMI isn't foldable!");
6570b57cec5SDimitry Andric #ifndef NDEBUG
6580b57cec5SDimitry Andric for (unsigned OpIdx : Ops)
6590b57cec5SDimitry Andric assert(MI.getOperand(OpIdx).isUse() && "Folding load into def!");
6600b57cec5SDimitry Andric #endif
6610b57cec5SDimitry Andric
6620b57cec5SDimitry Andric MachineBasicBlock &MBB = *MI.getParent();
6630b57cec5SDimitry Andric MachineFunction &MF = *MBB.getParent();
6640b57cec5SDimitry Andric
6650b57cec5SDimitry Andric // Ask the target to do the actual folding.
6660b57cec5SDimitry Andric MachineInstr *NewMI = nullptr;
6670b57cec5SDimitry Andric int FrameIndex = 0;
6680b57cec5SDimitry Andric
6690b57cec5SDimitry Andric if ((MI.getOpcode() == TargetOpcode::STACKMAP ||
6700b57cec5SDimitry Andric MI.getOpcode() == TargetOpcode::PATCHPOINT ||
6710b57cec5SDimitry Andric MI.getOpcode() == TargetOpcode::STATEPOINT) &&
6720b57cec5SDimitry Andric isLoadFromStackSlot(LoadMI, FrameIndex)) {
6730b57cec5SDimitry Andric // Fold stackmap/patchpoint.
6740b57cec5SDimitry Andric NewMI = foldPatchpoint(MF, MI, Ops, FrameIndex, *this);
6750b57cec5SDimitry Andric if (NewMI)
6760b57cec5SDimitry Andric NewMI = &*MBB.insert(MI, NewMI);
6770b57cec5SDimitry Andric } else {
6780b57cec5SDimitry Andric // Ask the target to do the actual folding.
6790b57cec5SDimitry Andric NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI, LIS);
6800b57cec5SDimitry Andric }
6810b57cec5SDimitry Andric
6820b57cec5SDimitry Andric if (!NewMI)
6830b57cec5SDimitry Andric return nullptr;
6840b57cec5SDimitry Andric
6850b57cec5SDimitry Andric // Copy the memoperands from the load to the folded instruction.
6860b57cec5SDimitry Andric if (MI.memoperands_empty()) {
6870b57cec5SDimitry Andric NewMI->setMemRefs(MF, LoadMI.memoperands());
6880b57cec5SDimitry Andric } else {
6890b57cec5SDimitry Andric // Handle the rare case of folding multiple loads.
6900b57cec5SDimitry Andric NewMI->setMemRefs(MF, MI.memoperands());
6910b57cec5SDimitry Andric for (MachineInstr::mmo_iterator I = LoadMI.memoperands_begin(),
6920b57cec5SDimitry Andric E = LoadMI.memoperands_end();
6930b57cec5SDimitry Andric I != E; ++I) {
6940b57cec5SDimitry Andric NewMI->addMemOperand(MF, *I);
6950b57cec5SDimitry Andric }
6960b57cec5SDimitry Andric }
6970b57cec5SDimitry Andric return NewMI;
6980b57cec5SDimitry Andric }
6990b57cec5SDimitry Andric
hasReassociableOperands(const MachineInstr & Inst,const MachineBasicBlock * MBB) const7000b57cec5SDimitry Andric bool TargetInstrInfo::hasReassociableOperands(
7010b57cec5SDimitry Andric const MachineInstr &Inst, const MachineBasicBlock *MBB) const {
7020b57cec5SDimitry Andric const MachineOperand &Op1 = Inst.getOperand(1);
7030b57cec5SDimitry Andric const MachineOperand &Op2 = Inst.getOperand(2);
7040b57cec5SDimitry Andric const MachineRegisterInfo &MRI = MBB->getParent()->getRegInfo();
7050b57cec5SDimitry Andric
7060b57cec5SDimitry Andric // We need virtual register definitions for the operands that we will
7070b57cec5SDimitry Andric // reassociate.
7080b57cec5SDimitry Andric MachineInstr *MI1 = nullptr;
7090b57cec5SDimitry Andric MachineInstr *MI2 = nullptr;
7108bcb0991SDimitry Andric if (Op1.isReg() && Register::isVirtualRegister(Op1.getReg()))
7110b57cec5SDimitry Andric MI1 = MRI.getUniqueVRegDef(Op1.getReg());
7128bcb0991SDimitry Andric if (Op2.isReg() && Register::isVirtualRegister(Op2.getReg()))
7130b57cec5SDimitry Andric MI2 = MRI.getUniqueVRegDef(Op2.getReg());
7140b57cec5SDimitry Andric
7150b57cec5SDimitry Andric // And they need to be in the trace (otherwise, they won't have a depth).
7160b57cec5SDimitry Andric return MI1 && MI2 && MI1->getParent() == MBB && MI2->getParent() == MBB;
7170b57cec5SDimitry Andric }
7180b57cec5SDimitry Andric
hasReassociableSibling(const MachineInstr & Inst,bool & Commuted) const7190b57cec5SDimitry Andric bool TargetInstrInfo::hasReassociableSibling(const MachineInstr &Inst,
7200b57cec5SDimitry Andric bool &Commuted) const {
7210b57cec5SDimitry Andric const MachineBasicBlock *MBB = Inst.getParent();
7220b57cec5SDimitry Andric const MachineRegisterInfo &MRI = MBB->getParent()->getRegInfo();
7230b57cec5SDimitry Andric MachineInstr *MI1 = MRI.getUniqueVRegDef(Inst.getOperand(1).getReg());
7240b57cec5SDimitry Andric MachineInstr *MI2 = MRI.getUniqueVRegDef(Inst.getOperand(2).getReg());
7250b57cec5SDimitry Andric unsigned AssocOpcode = Inst.getOpcode();
7260b57cec5SDimitry Andric
7270b57cec5SDimitry Andric // If only one operand has the same opcode and it's the second source operand,
7280b57cec5SDimitry Andric // the operands must be commuted.
7290b57cec5SDimitry Andric Commuted = MI1->getOpcode() != AssocOpcode && MI2->getOpcode() == AssocOpcode;
7300b57cec5SDimitry Andric if (Commuted)
7310b57cec5SDimitry Andric std::swap(MI1, MI2);
7320b57cec5SDimitry Andric
7330b57cec5SDimitry Andric // 1. The previous instruction must be the same type as Inst.
7345ffd83dbSDimitry Andric // 2. The previous instruction must also be associative/commutative (this can
7355ffd83dbSDimitry Andric // be different even for instructions with the same opcode if traits like
7365ffd83dbSDimitry Andric // fast-math-flags are included).
7375ffd83dbSDimitry Andric // 3. The previous instruction must have virtual register definitions for its
7380b57cec5SDimitry Andric // operands in the same basic block as Inst.
7395ffd83dbSDimitry Andric // 4. The previous instruction's result must only be used by Inst.
7405ffd83dbSDimitry Andric return MI1->getOpcode() == AssocOpcode && isAssociativeAndCommutative(*MI1) &&
7410b57cec5SDimitry Andric hasReassociableOperands(*MI1, MBB) &&
7420b57cec5SDimitry Andric MRI.hasOneNonDBGUse(MI1->getOperand(0).getReg());
7430b57cec5SDimitry Andric }
7440b57cec5SDimitry Andric
7450b57cec5SDimitry Andric // 1. The operation must be associative and commutative.
7460b57cec5SDimitry Andric // 2. The instruction must have virtual register definitions for its
7470b57cec5SDimitry Andric // operands in the same basic block.
7480b57cec5SDimitry Andric // 3. The instruction must have a reassociable sibling.
isReassociationCandidate(const MachineInstr & Inst,bool & Commuted) const7490b57cec5SDimitry Andric bool TargetInstrInfo::isReassociationCandidate(const MachineInstr &Inst,
7500b57cec5SDimitry Andric bool &Commuted) const {
7510b57cec5SDimitry Andric return isAssociativeAndCommutative(Inst) &&
7520b57cec5SDimitry Andric hasReassociableOperands(Inst, Inst.getParent()) &&
7530b57cec5SDimitry Andric hasReassociableSibling(Inst, Commuted);
7540b57cec5SDimitry Andric }
7550b57cec5SDimitry Andric
7560b57cec5SDimitry Andric // The concept of the reassociation pass is that these operations can benefit
7570b57cec5SDimitry Andric // from this kind of transformation:
7580b57cec5SDimitry Andric //
7590b57cec5SDimitry Andric // A = ? op ?
7600b57cec5SDimitry Andric // B = A op X (Prev)
7610b57cec5SDimitry Andric // C = B op Y (Root)
7620b57cec5SDimitry Andric // -->
7630b57cec5SDimitry Andric // A = ? op ?
7640b57cec5SDimitry Andric // B = X op Y
7650b57cec5SDimitry Andric // C = A op B
7660b57cec5SDimitry Andric //
7670b57cec5SDimitry Andric // breaking the dependency between A and B, allowing them to be executed in
7680b57cec5SDimitry Andric // parallel (or back-to-back in a pipeline) instead of depending on each other.
7690b57cec5SDimitry Andric
7700b57cec5SDimitry Andric // FIXME: This has the potential to be expensive (compile time) while not
7710b57cec5SDimitry Andric // improving the code at all. Some ways to limit the overhead:
7720b57cec5SDimitry Andric // 1. Track successful transforms; bail out if hit rate gets too low.
7730b57cec5SDimitry Andric // 2. Only enable at -O3 or some other non-default optimization level.
7740b57cec5SDimitry Andric // 3. Pre-screen pattern candidates here: if an operand of the previous
7750b57cec5SDimitry Andric // instruction is known to not increase the critical path, then don't match
7760b57cec5SDimitry Andric // that pattern.
getMachineCombinerPatterns(MachineInstr & Root,SmallVectorImpl<MachineCombinerPattern> & Patterns,bool DoRegPressureReduce) const7770b57cec5SDimitry Andric bool TargetInstrInfo::getMachineCombinerPatterns(
778af732203SDimitry Andric MachineInstr &Root, SmallVectorImpl<MachineCombinerPattern> &Patterns,
779af732203SDimitry Andric bool DoRegPressureReduce) const {
7800b57cec5SDimitry Andric bool Commute;
7810b57cec5SDimitry Andric if (isReassociationCandidate(Root, Commute)) {
7820b57cec5SDimitry Andric // We found a sequence of instructions that may be suitable for a
7830b57cec5SDimitry Andric // reassociation of operands to increase ILP. Specify each commutation
7840b57cec5SDimitry Andric // possibility for the Prev instruction in the sequence and let the
7850b57cec5SDimitry Andric // machine combiner decide if changing the operands is worthwhile.
7860b57cec5SDimitry Andric if (Commute) {
7870b57cec5SDimitry Andric Patterns.push_back(MachineCombinerPattern::REASSOC_AX_YB);
7880b57cec5SDimitry Andric Patterns.push_back(MachineCombinerPattern::REASSOC_XA_YB);
7890b57cec5SDimitry Andric } else {
7900b57cec5SDimitry Andric Patterns.push_back(MachineCombinerPattern::REASSOC_AX_BY);
7910b57cec5SDimitry Andric Patterns.push_back(MachineCombinerPattern::REASSOC_XA_BY);
7920b57cec5SDimitry Andric }
7930b57cec5SDimitry Andric return true;
7940b57cec5SDimitry Andric }
7950b57cec5SDimitry Andric
7960b57cec5SDimitry Andric return false;
7970b57cec5SDimitry Andric }
7980b57cec5SDimitry Andric
7990b57cec5SDimitry Andric /// Return true when a code sequence can improve loop throughput.
8000b57cec5SDimitry Andric bool
isThroughputPattern(MachineCombinerPattern Pattern) const8010b57cec5SDimitry Andric TargetInstrInfo::isThroughputPattern(MachineCombinerPattern Pattern) const {
8020b57cec5SDimitry Andric return false;
8030b57cec5SDimitry Andric }
8040b57cec5SDimitry Andric
8050b57cec5SDimitry Andric /// Attempt the reassociation transformation to reduce critical path length.
8060b57cec5SDimitry Andric /// See the above comments before getMachineCombinerPatterns().
void TargetInstrInfo::reassociateOps(
    MachineInstr &Root, MachineInstr &Prev,
    MachineCombinerPattern Pattern,
    SmallVectorImpl<MachineInstr *> &InsInstrs,
    SmallVectorImpl<MachineInstr *> &DelInstrs,
    DenseMap<unsigned, unsigned> &InstrIdxForVirtReg) const {
  MachineFunction *MF = Root.getMF();
  MachineRegisterInfo &MRI = MF->getRegInfo();
  const TargetInstrInfo *TII = MF->getSubtarget().getInstrInfo();
  const TargetRegisterInfo *TRI = MF->getSubtarget().getRegisterInfo();
  const TargetRegisterClass *RC = Root.getRegClassConstraint(0, TII, TRI);

  // This array encodes the operand index for each parameter because the
  // operands may be commuted. Each row corresponds to a pattern value,
  // and each column specifies the index of A, B, X, Y.
  unsigned OpIdx[4][4] = {
    { 1, 1, 2, 2 },
    { 1, 2, 2, 1 },
    { 2, 1, 1, 2 },
    { 2, 2, 1, 1 }
  };

  // Map the pattern onto a row of the table above (the transform rewrites
  // B = A op X; C = B op Y into B' = X op Y; C = A op B').
  int Row;
  switch (Pattern) {
  case MachineCombinerPattern::REASSOC_AX_BY: Row = 0; break;
  case MachineCombinerPattern::REASSOC_AX_YB: Row = 1; break;
  case MachineCombinerPattern::REASSOC_XA_BY: Row = 2; break;
  case MachineCombinerPattern::REASSOC_XA_YB: Row = 3; break;
  default: llvm_unreachable("unexpected MachineCombinerPattern");
  }

  MachineOperand &OpA = Prev.getOperand(OpIdx[Row][0]);
  MachineOperand &OpB = Root.getOperand(OpIdx[Row][1]);
  MachineOperand &OpX = Prev.getOperand(OpIdx[Row][2]);
  MachineOperand &OpY = Root.getOperand(OpIdx[Row][3]);
  MachineOperand &OpC = Root.getOperand(0);

  Register RegA = OpA.getReg();
  Register RegB = OpB.getReg();
  Register RegX = OpX.getReg();
  Register RegY = OpY.getReg();
  Register RegC = OpC.getReg();

  // Constrain every participating virtual register to the class required by
  // Root's definition so the rebuilt instructions remain valid.
  if (Register::isVirtualRegister(RegA))
    MRI.constrainRegClass(RegA, RC);
  if (Register::isVirtualRegister(RegB))
    MRI.constrainRegClass(RegB, RC);
  if (Register::isVirtualRegister(RegX))
    MRI.constrainRegClass(RegX, RC);
  if (Register::isVirtualRegister(RegY))
    MRI.constrainRegClass(RegY, RC);
  if (Register::isVirtualRegister(RegC))
    MRI.constrainRegClass(RegC, RC);

  // Create a new virtual register for the result of (X op Y) instead of
  // recycling RegB because the MachineCombiner's computation of the critical
  // path requires a new register definition rather than an existing one.
  Register NewVR = MRI.createVirtualRegister(RC);
  InstrIdxForVirtReg.insert(std::make_pair(NewVR, 0));

  unsigned Opcode = Root.getOpcode();
  bool KillA = OpA.isKill();
  bool KillX = OpX.isKill();
  bool KillY = OpY.isKill();

  // Create new instructions for insertion: NewVR = X op Y, then C = A op NewVR.
  MachineInstrBuilder MIB1 =
      BuildMI(*MF, Prev.getDebugLoc(), TII->get(Opcode), NewVR)
          .addReg(RegX, getKillRegState(KillX))
          .addReg(RegY, getKillRegState(KillY));
  MachineInstrBuilder MIB2 =
      BuildMI(*MF, Root.getDebugLoc(), TII->get(Opcode), RegC)
          .addReg(RegA, getKillRegState(KillA))
          .addReg(NewVR, getKillRegState(true));

  // Target hook: transfer any special operand attributes from the old pair of
  // instructions to the new pair.
  setSpecialOperandAttr(Root, Prev, *MIB1, *MIB2);

  // Record new instructions for insertion and old instructions for deletion.
  InsInstrs.push_back(MIB1);
  InsInstrs.push_back(MIB2);
  DelInstrs.push_back(&Prev);
  DelInstrs.push_back(&Root);
}
8900b57cec5SDimitry Andric
genAlternativeCodeSequence(MachineInstr & Root,MachineCombinerPattern Pattern,SmallVectorImpl<MachineInstr * > & InsInstrs,SmallVectorImpl<MachineInstr * > & DelInstrs,DenseMap<unsigned,unsigned> & InstIdxForVirtReg) const8910b57cec5SDimitry Andric void TargetInstrInfo::genAlternativeCodeSequence(
8920b57cec5SDimitry Andric MachineInstr &Root, MachineCombinerPattern Pattern,
8930b57cec5SDimitry Andric SmallVectorImpl<MachineInstr *> &InsInstrs,
8940b57cec5SDimitry Andric SmallVectorImpl<MachineInstr *> &DelInstrs,
8950b57cec5SDimitry Andric DenseMap<unsigned, unsigned> &InstIdxForVirtReg) const {
8960b57cec5SDimitry Andric MachineRegisterInfo &MRI = Root.getMF()->getRegInfo();
8970b57cec5SDimitry Andric
8980b57cec5SDimitry Andric // Select the previous instruction in the sequence based on the input pattern.
8990b57cec5SDimitry Andric MachineInstr *Prev = nullptr;
9000b57cec5SDimitry Andric switch (Pattern) {
9010b57cec5SDimitry Andric case MachineCombinerPattern::REASSOC_AX_BY:
9020b57cec5SDimitry Andric case MachineCombinerPattern::REASSOC_XA_BY:
9030b57cec5SDimitry Andric Prev = MRI.getUniqueVRegDef(Root.getOperand(1).getReg());
9040b57cec5SDimitry Andric break;
9050b57cec5SDimitry Andric case MachineCombinerPattern::REASSOC_AX_YB:
9060b57cec5SDimitry Andric case MachineCombinerPattern::REASSOC_XA_YB:
9070b57cec5SDimitry Andric Prev = MRI.getUniqueVRegDef(Root.getOperand(2).getReg());
9080b57cec5SDimitry Andric break;
9090b57cec5SDimitry Andric default:
9100b57cec5SDimitry Andric break;
9110b57cec5SDimitry Andric }
9120b57cec5SDimitry Andric
9130b57cec5SDimitry Andric assert(Prev && "Unknown pattern for machine combiner");
9140b57cec5SDimitry Andric
9150b57cec5SDimitry Andric reassociateOps(Root, *Prev, Pattern, InsInstrs, DelInstrs, InstIdxForVirtReg);
9160b57cec5SDimitry Andric }
9170b57cec5SDimitry Andric
isReallyTriviallyReMaterializableGeneric(const MachineInstr & MI,AAResults * AA) const9180b57cec5SDimitry Andric bool TargetInstrInfo::isReallyTriviallyReMaterializableGeneric(
9198bcb0991SDimitry Andric const MachineInstr &MI, AAResults *AA) const {
9200b57cec5SDimitry Andric const MachineFunction &MF = *MI.getMF();
9210b57cec5SDimitry Andric const MachineRegisterInfo &MRI = MF.getRegInfo();
9220b57cec5SDimitry Andric
9230b57cec5SDimitry Andric // Remat clients assume operand 0 is the defined register.
9240b57cec5SDimitry Andric if (!MI.getNumOperands() || !MI.getOperand(0).isReg())
9250b57cec5SDimitry Andric return false;
9268bcb0991SDimitry Andric Register DefReg = MI.getOperand(0).getReg();
9270b57cec5SDimitry Andric
9280b57cec5SDimitry Andric // A sub-register definition can only be rematerialized if the instruction
9290b57cec5SDimitry Andric // doesn't read the other parts of the register. Otherwise it is really a
9300b57cec5SDimitry Andric // read-modify-write operation on the full virtual register which cannot be
9310b57cec5SDimitry Andric // moved safely.
9328bcb0991SDimitry Andric if (Register::isVirtualRegister(DefReg) && MI.getOperand(0).getSubReg() &&
9338bcb0991SDimitry Andric MI.readsVirtualRegister(DefReg))
9340b57cec5SDimitry Andric return false;
9350b57cec5SDimitry Andric
9360b57cec5SDimitry Andric // A load from a fixed stack slot can be rematerialized. This may be
9370b57cec5SDimitry Andric // redundant with subsequent checks, but it's target-independent,
9380b57cec5SDimitry Andric // simple, and a common case.
9390b57cec5SDimitry Andric int FrameIdx = 0;
9400b57cec5SDimitry Andric if (isLoadFromStackSlot(MI, FrameIdx) &&
9410b57cec5SDimitry Andric MF.getFrameInfo().isImmutableObjectIndex(FrameIdx))
9420b57cec5SDimitry Andric return true;
9430b57cec5SDimitry Andric
9440b57cec5SDimitry Andric // Avoid instructions obviously unsafe for remat.
9450b57cec5SDimitry Andric if (MI.isNotDuplicable() || MI.mayStore() || MI.mayRaiseFPException() ||
9460b57cec5SDimitry Andric MI.hasUnmodeledSideEffects())
9470b57cec5SDimitry Andric return false;
9480b57cec5SDimitry Andric
9490b57cec5SDimitry Andric // Don't remat inline asm. We have no idea how expensive it is
9500b57cec5SDimitry Andric // even if it's side effect free.
9510b57cec5SDimitry Andric if (MI.isInlineAsm())
9520b57cec5SDimitry Andric return false;
9530b57cec5SDimitry Andric
9540b57cec5SDimitry Andric // Avoid instructions which load from potentially varying memory.
9550b57cec5SDimitry Andric if (MI.mayLoad() && !MI.isDereferenceableInvariantLoad(AA))
9560b57cec5SDimitry Andric return false;
9570b57cec5SDimitry Andric
9580b57cec5SDimitry Andric // If any of the registers accessed are non-constant, conservatively assume
9590b57cec5SDimitry Andric // the instruction is not rematerializable.
9600b57cec5SDimitry Andric for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
9610b57cec5SDimitry Andric const MachineOperand &MO = MI.getOperand(i);
9620b57cec5SDimitry Andric if (!MO.isReg()) continue;
9638bcb0991SDimitry Andric Register Reg = MO.getReg();
9640b57cec5SDimitry Andric if (Reg == 0)
9650b57cec5SDimitry Andric continue;
9660b57cec5SDimitry Andric
9670b57cec5SDimitry Andric // Check for a well-behaved physical register.
9688bcb0991SDimitry Andric if (Register::isPhysicalRegister(Reg)) {
9690b57cec5SDimitry Andric if (MO.isUse()) {
9700b57cec5SDimitry Andric // If the physreg has no defs anywhere, it's just an ambient register
9710b57cec5SDimitry Andric // and we can freely move its uses. Alternatively, if it's allocatable,
9720b57cec5SDimitry Andric // it could get allocated to something with a def during allocation.
9730b57cec5SDimitry Andric if (!MRI.isConstantPhysReg(Reg))
9740b57cec5SDimitry Andric return false;
9750b57cec5SDimitry Andric } else {
9760b57cec5SDimitry Andric // A physreg def. We can't remat it.
9770b57cec5SDimitry Andric return false;
9780b57cec5SDimitry Andric }
9790b57cec5SDimitry Andric continue;
9800b57cec5SDimitry Andric }
9810b57cec5SDimitry Andric
9820b57cec5SDimitry Andric // Only allow one virtual-register def. There may be multiple defs of the
9830b57cec5SDimitry Andric // same virtual register, though.
9840b57cec5SDimitry Andric if (MO.isDef() && Reg != DefReg)
9850b57cec5SDimitry Andric return false;
9860b57cec5SDimitry Andric
9870b57cec5SDimitry Andric // Don't allow any virtual-register uses. Rematting an instruction with
9880b57cec5SDimitry Andric // virtual register uses would length the live ranges of the uses, which
9890b57cec5SDimitry Andric // is not necessarily a good idea, certainly not "trivial".
9900b57cec5SDimitry Andric if (MO.isUse())
9910b57cec5SDimitry Andric return false;
9920b57cec5SDimitry Andric }
9930b57cec5SDimitry Andric
9940b57cec5SDimitry Andric // Everything checked out.
9950b57cec5SDimitry Andric return true;
9960b57cec5SDimitry Andric }
9970b57cec5SDimitry Andric
getSPAdjust(const MachineInstr & MI) const9980b57cec5SDimitry Andric int TargetInstrInfo::getSPAdjust(const MachineInstr &MI) const {
9990b57cec5SDimitry Andric const MachineFunction *MF = MI.getMF();
10000b57cec5SDimitry Andric const TargetFrameLowering *TFI = MF->getSubtarget().getFrameLowering();
10010b57cec5SDimitry Andric bool StackGrowsDown =
10020b57cec5SDimitry Andric TFI->getStackGrowthDirection() == TargetFrameLowering::StackGrowsDown;
10030b57cec5SDimitry Andric
10040b57cec5SDimitry Andric unsigned FrameSetupOpcode = getCallFrameSetupOpcode();
10050b57cec5SDimitry Andric unsigned FrameDestroyOpcode = getCallFrameDestroyOpcode();
10060b57cec5SDimitry Andric
10070b57cec5SDimitry Andric if (!isFrameInstr(MI))
10080b57cec5SDimitry Andric return 0;
10090b57cec5SDimitry Andric
10100b57cec5SDimitry Andric int SPAdj = TFI->alignSPAdjust(getFrameSize(MI));
10110b57cec5SDimitry Andric
10120b57cec5SDimitry Andric if ((!StackGrowsDown && MI.getOpcode() == FrameSetupOpcode) ||
10130b57cec5SDimitry Andric (StackGrowsDown && MI.getOpcode() == FrameDestroyOpcode))
10140b57cec5SDimitry Andric SPAdj = -SPAdj;
10150b57cec5SDimitry Andric
10160b57cec5SDimitry Andric return SPAdj;
10170b57cec5SDimitry Andric }
10180b57cec5SDimitry Andric
10190b57cec5SDimitry Andric /// isSchedulingBoundary - Test if the given instruction should be
10200b57cec5SDimitry Andric /// considered a scheduling boundary. This primarily includes labels
10210b57cec5SDimitry Andric /// and terminators.
isSchedulingBoundary(const MachineInstr & MI,const MachineBasicBlock * MBB,const MachineFunction & MF) const10220b57cec5SDimitry Andric bool TargetInstrInfo::isSchedulingBoundary(const MachineInstr &MI,
10230b57cec5SDimitry Andric const MachineBasicBlock *MBB,
10240b57cec5SDimitry Andric const MachineFunction &MF) const {
10250b57cec5SDimitry Andric // Terminators and labels can't be scheduled around.
10260b57cec5SDimitry Andric if (MI.isTerminator() || MI.isPosition())
10270b57cec5SDimitry Andric return true;
10280b57cec5SDimitry Andric
10295ffd83dbSDimitry Andric // INLINEASM_BR can jump to another block
10305ffd83dbSDimitry Andric if (MI.getOpcode() == TargetOpcode::INLINEASM_BR)
10315ffd83dbSDimitry Andric return true;
10325ffd83dbSDimitry Andric
10330b57cec5SDimitry Andric // Don't attempt to schedule around any instruction that defines
10340b57cec5SDimitry Andric // a stack-oriented pointer, as it's unlikely to be profitable. This
10350b57cec5SDimitry Andric // saves compile time, because it doesn't require every single
10360b57cec5SDimitry Andric // stack slot reference to depend on the instruction that does the
10370b57cec5SDimitry Andric // modification.
10380b57cec5SDimitry Andric const TargetLowering &TLI = *MF.getSubtarget().getTargetLowering();
10390b57cec5SDimitry Andric const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
10400b57cec5SDimitry Andric return MI.modifiesRegister(TLI.getStackPointerRegisterToSaveRestore(), TRI);
10410b57cec5SDimitry Andric }
10420b57cec5SDimitry Andric
// Provide a global flag for disabling the PreRA hazard recognizer that targets
// may choose to honor.
bool TargetInstrInfo::usePreRAHazardRecognizer() const {
  // DisableHazardRecognizer is a file-level flag (its definition is earlier
  // in this file, outside this view) — when set, targets honoring this hook
  // skip hazard recognition before register allocation.
  return !DisableHazardRecognizer;
}
10480b57cec5SDimitry Andric
// Default implementation of CreateTargetRAHazardRecognizer.
// Targets with structural hazards are expected to override this hook.
ScheduleHazardRecognizer *TargetInstrInfo::
CreateTargetHazardRecognizer(const TargetSubtargetInfo *STI,
                             const ScheduleDAG *DAG) const {
  // Dummy hazard recognizer allows all instructions to issue.
  // Caller takes ownership of the returned object.
  return new ScheduleHazardRecognizer();
}
10560b57cec5SDimitry Andric
// Default implementation of CreateTargetMIHazardRecognizer.
ScheduleHazardRecognizer *TargetInstrInfo::CreateTargetMIHazardRecognizer(
    const InstrItineraryData *II, const ScheduleDAGMI *DAG) const {
  // Generic scoreboard recognizer driven by the itinerary data; the string
  // identifies the machine-scheduler client (presumably used as the debug
  // type — see ScoreboardHazardRecognizer). Caller owns the result.
  return new ScoreboardHazardRecognizer(II, DAG, "machine-scheduler");
}
10620b57cec5SDimitry Andric
// Default implementation of CreateTargetPostRAHazardRecognizer.
ScheduleHazardRecognizer *TargetInstrInfo::
CreateTargetPostRAHazardRecognizer(const InstrItineraryData *II,
                                   const ScheduleDAG *DAG) const {
  // Same scoreboard recognizer as the MI scheduler variant, tagged for the
  // post-RA scheduler client. Caller owns the result.
  return new ScoreboardHazardRecognizer(II, DAG, "post-RA-sched");
}
10690b57cec5SDimitry Andric
10705ffd83dbSDimitry Andric // Default implementation of getMemOperandWithOffset.
getMemOperandWithOffset(const MachineInstr & MI,const MachineOperand * & BaseOp,int64_t & Offset,bool & OffsetIsScalable,const TargetRegisterInfo * TRI) const10715ffd83dbSDimitry Andric bool TargetInstrInfo::getMemOperandWithOffset(
10725ffd83dbSDimitry Andric const MachineInstr &MI, const MachineOperand *&BaseOp, int64_t &Offset,
10735ffd83dbSDimitry Andric bool &OffsetIsScalable, const TargetRegisterInfo *TRI) const {
10745ffd83dbSDimitry Andric SmallVector<const MachineOperand *, 4> BaseOps;
10755ffd83dbSDimitry Andric unsigned Width;
10765ffd83dbSDimitry Andric if (!getMemOperandsWithOffsetWidth(MI, BaseOps, Offset, OffsetIsScalable,
10775ffd83dbSDimitry Andric Width, TRI) ||
10785ffd83dbSDimitry Andric BaseOps.size() != 1)
10795ffd83dbSDimitry Andric return false;
10805ffd83dbSDimitry Andric BaseOp = BaseOps.front();
10815ffd83dbSDimitry Andric return true;
10825ffd83dbSDimitry Andric }
10835ffd83dbSDimitry Andric
10840b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
10850b57cec5SDimitry Andric // SelectionDAG latency interface.
10860b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
10870b57cec5SDimitry Andric
10880b57cec5SDimitry Andric int
getOperandLatency(const InstrItineraryData * ItinData,SDNode * DefNode,unsigned DefIdx,SDNode * UseNode,unsigned UseIdx) const10890b57cec5SDimitry Andric TargetInstrInfo::getOperandLatency(const InstrItineraryData *ItinData,
10900b57cec5SDimitry Andric SDNode *DefNode, unsigned DefIdx,
10910b57cec5SDimitry Andric SDNode *UseNode, unsigned UseIdx) const {
10920b57cec5SDimitry Andric if (!ItinData || ItinData->isEmpty())
10930b57cec5SDimitry Andric return -1;
10940b57cec5SDimitry Andric
10950b57cec5SDimitry Andric if (!DefNode->isMachineOpcode())
10960b57cec5SDimitry Andric return -1;
10970b57cec5SDimitry Andric
10980b57cec5SDimitry Andric unsigned DefClass = get(DefNode->getMachineOpcode()).getSchedClass();
10990b57cec5SDimitry Andric if (!UseNode->isMachineOpcode())
11000b57cec5SDimitry Andric return ItinData->getOperandCycle(DefClass, DefIdx);
11010b57cec5SDimitry Andric unsigned UseClass = get(UseNode->getMachineOpcode()).getSchedClass();
11020b57cec5SDimitry Andric return ItinData->getOperandLatency(DefClass, DefIdx, UseClass, UseIdx);
11030b57cec5SDimitry Andric }
11040b57cec5SDimitry Andric
getInstrLatency(const InstrItineraryData * ItinData,SDNode * N) const11050b57cec5SDimitry Andric int TargetInstrInfo::getInstrLatency(const InstrItineraryData *ItinData,
11060b57cec5SDimitry Andric SDNode *N) const {
11070b57cec5SDimitry Andric if (!ItinData || ItinData->isEmpty())
11080b57cec5SDimitry Andric return 1;
11090b57cec5SDimitry Andric
11100b57cec5SDimitry Andric if (!N->isMachineOpcode())
11110b57cec5SDimitry Andric return 1;
11120b57cec5SDimitry Andric
11130b57cec5SDimitry Andric return ItinData->getStageLatency(get(N->getMachineOpcode()).getSchedClass());
11140b57cec5SDimitry Andric }
11150b57cec5SDimitry Andric
11160b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
11170b57cec5SDimitry Andric // MachineInstr latency interface.
11180b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
11190b57cec5SDimitry Andric
getNumMicroOps(const InstrItineraryData * ItinData,const MachineInstr & MI) const11200b57cec5SDimitry Andric unsigned TargetInstrInfo::getNumMicroOps(const InstrItineraryData *ItinData,
11210b57cec5SDimitry Andric const MachineInstr &MI) const {
11220b57cec5SDimitry Andric if (!ItinData || ItinData->isEmpty())
11230b57cec5SDimitry Andric return 1;
11240b57cec5SDimitry Andric
11250b57cec5SDimitry Andric unsigned Class = MI.getDesc().getSchedClass();
11260b57cec5SDimitry Andric int UOps = ItinData->Itineraries[Class].NumMicroOps;
11270b57cec5SDimitry Andric if (UOps >= 0)
11280b57cec5SDimitry Andric return UOps;
11290b57cec5SDimitry Andric
11300b57cec5SDimitry Andric // The # of u-ops is dynamically determined. The specific target should
11310b57cec5SDimitry Andric // override this function to return the right number.
11320b57cec5SDimitry Andric return 1;
11330b57cec5SDimitry Andric }
11340b57cec5SDimitry Andric
11350b57cec5SDimitry Andric /// Return the default expected latency for a def based on it's opcode.
defaultDefLatency(const MCSchedModel & SchedModel,const MachineInstr & DefMI) const11360b57cec5SDimitry Andric unsigned TargetInstrInfo::defaultDefLatency(const MCSchedModel &SchedModel,
11370b57cec5SDimitry Andric const MachineInstr &DefMI) const {
11380b57cec5SDimitry Andric if (DefMI.isTransient())
11390b57cec5SDimitry Andric return 0;
11400b57cec5SDimitry Andric if (DefMI.mayLoad())
11410b57cec5SDimitry Andric return SchedModel.LoadLatency;
11420b57cec5SDimitry Andric if (isHighLatencyDef(DefMI.getOpcode()))
11430b57cec5SDimitry Andric return SchedModel.HighLatency;
11440b57cec5SDimitry Andric return 1;
11450b57cec5SDimitry Andric }
11460b57cec5SDimitry Andric
// By default predication is assumed to add no extra latency; targets where
// predicated execution has a cost override this hook.
unsigned TargetInstrInfo::getPredicationCost(const MachineInstr &) const {
  return 0;
}
11500b57cec5SDimitry Andric
getInstrLatency(const InstrItineraryData * ItinData,const MachineInstr & MI,unsigned * PredCost) const11510b57cec5SDimitry Andric unsigned TargetInstrInfo::getInstrLatency(const InstrItineraryData *ItinData,
11520b57cec5SDimitry Andric const MachineInstr &MI,
11530b57cec5SDimitry Andric unsigned *PredCost) const {
11540b57cec5SDimitry Andric // Default to one cycle for no itinerary. However, an "empty" itinerary may
11550b57cec5SDimitry Andric // still have a MinLatency property, which getStageLatency checks.
11560b57cec5SDimitry Andric if (!ItinData)
11570b57cec5SDimitry Andric return MI.mayLoad() ? 2 : 1;
11580b57cec5SDimitry Andric
11590b57cec5SDimitry Andric return ItinData->getStageLatency(MI.getDesc().getSchedClass());
11600b57cec5SDimitry Andric }
11610b57cec5SDimitry Andric
hasLowDefLatency(const TargetSchedModel & SchedModel,const MachineInstr & DefMI,unsigned DefIdx) const11620b57cec5SDimitry Andric bool TargetInstrInfo::hasLowDefLatency(const TargetSchedModel &SchedModel,
11630b57cec5SDimitry Andric const MachineInstr &DefMI,
11640b57cec5SDimitry Andric unsigned DefIdx) const {
11650b57cec5SDimitry Andric const InstrItineraryData *ItinData = SchedModel.getInstrItineraries();
11660b57cec5SDimitry Andric if (!ItinData || ItinData->isEmpty())
11670b57cec5SDimitry Andric return false;
11680b57cec5SDimitry Andric
11690b57cec5SDimitry Andric unsigned DefClass = DefMI.getDesc().getSchedClass();
11700b57cec5SDimitry Andric int DefCycle = ItinData->getOperandCycle(DefClass, DefIdx);
11710b57cec5SDimitry Andric return (DefCycle != -1 && DefCycle <= 1);
11720b57cec5SDimitry Andric }
11730b57cec5SDimitry Andric
/// Default implementation of describeLoadedValue: produce a debug-info
/// description (location + DIExpression) of the value that \p MI places in
/// \p Reg, for call-site parameter / entry-value tracking. Handles three
/// instruction shapes — a plain register copy, a reg = reg + imm, and a
/// single-memoperand load from provably non-escaping memory — and returns
/// None for anything else (targets may override to describe more).
Optional<ParamLoadedValue>
TargetInstrInfo::describeLoadedValue(const MachineInstr &MI,
                                     Register Reg) const {
  const MachineFunction *MF = MI.getMF();
  const TargetRegisterInfo *TRI = MF->getSubtarget().getRegisterInfo();
  // Start from an empty DWARF expression; the branches below prepend to it.
  DIExpression *Expr = DIExpression::get(MF->getFunction().getContext(), {});
  int64_t Offset;
  bool OffsetIsScalable;

  // To simplify the sub-register handling, verify that we only need to
  // consider physical registers.
  assert(MF->getProperties().hasProperty(
      MachineFunctionProperties::Property::NoVRegs));

  if (auto DestSrc = isCopyInstr(MI)) {
    Register DestReg = DestSrc->Destination->getReg();

    // If the copy destination is the forwarding reg, describe the forwarding
    // reg using the copy source as the backup location. Example:
    //
    // x0 = MOV x7
    // call callee(x0) ; x0 described as x7
    if (Reg == DestReg)
      return ParamLoadedValue(*DestSrc->Source, Expr);

    // Cases where super- or sub-registers needs to be described should
    // be handled by the target's hook implementation.
    assert(!TRI->isSuperOrSubRegisterEq(Reg, DestReg) &&
           "TargetInstrInfo::describeLoadedValue can't describe super- or "
           "sub-regs for copy instructions");
    return None;
  } else if (auto RegImm = isAddImmediate(MI, Reg)) {
    // Reg = SrcReg + Imm: describe as SrcReg with an offset expression.
    Register SrcReg = RegImm->Reg;
    Offset = RegImm->Imm;
    Expr = DIExpression::prepend(Expr, DIExpression::ApplyOffset, Offset);
    return ParamLoadedValue(MachineOperand::CreateReg(SrcReg, false), Expr);
  } else if (MI.hasOneMemOperand()) {
    // Only describe memory which provably does not escape the function. As
    // described in llvm.org/PR43343, escaped memory may be clobbered by the
    // callee (or by another thread).
    const auto &TII = MF->getSubtarget().getInstrInfo();
    const MachineFrameInfo &MFI = MF->getFrameInfo();
    const MachineMemOperand *MMO = MI.memoperands()[0];
    const PseudoSourceValue *PSV = MMO->getPseudoValue();

    // If the address points to "special" memory (e.g. a spill slot), it's
    // sufficient to check that it isn't aliased by any high-level IR value.
    if (!PSV || PSV->mayAlias(&MFI))
      return None;

    const MachineOperand *BaseOp;
    if (!TII->getMemOperandWithOffset(MI, BaseOp, Offset, OffsetIsScalable,
                                      TRI))
      return None;

    // FIXME: Scalable offsets are not yet handled in the offset code below.
    if (OffsetIsScalable)
      return None;

    // TODO: Can currently only handle mem instructions with a single define.
    // An example from the x86 target:
    // ...
    // DIV64m $rsp, 1, $noreg, 24, $noreg, implicit-def dead $rax, implicit-def $rdx
    // ...
    //
    if (MI.getNumExplicitDefs() != 1)
      return None;

    // TODO: In what way do we need to take Reg into consideration here?

    // Describe the load as a sized dereference of [Base + Offset].
    SmallVector<uint64_t, 8> Ops;
    DIExpression::appendOffset(Ops, Offset);
    Ops.push_back(dwarf::DW_OP_deref_size);
    Ops.push_back(MMO->getSize());
    Expr = DIExpression::prependOpcodes(Expr, Ops);
    return ParamLoadedValue(*BaseOp, Expr);
  }

  return None;
}
12548bcb0991SDimitry Andric
12550b57cec5SDimitry Andric /// Both DefMI and UseMI must be valid. By default, call directly to the
12560b57cec5SDimitry Andric /// itinerary. This may be overriden by the target.
getOperandLatency(const InstrItineraryData * ItinData,const MachineInstr & DefMI,unsigned DefIdx,const MachineInstr & UseMI,unsigned UseIdx) const12570b57cec5SDimitry Andric int TargetInstrInfo::getOperandLatency(const InstrItineraryData *ItinData,
12580b57cec5SDimitry Andric const MachineInstr &DefMI,
12590b57cec5SDimitry Andric unsigned DefIdx,
12600b57cec5SDimitry Andric const MachineInstr &UseMI,
12610b57cec5SDimitry Andric unsigned UseIdx) const {
12620b57cec5SDimitry Andric unsigned DefClass = DefMI.getDesc().getSchedClass();
12630b57cec5SDimitry Andric unsigned UseClass = UseMI.getDesc().getSchedClass();
12640b57cec5SDimitry Andric return ItinData->getOperandLatency(DefClass, DefIdx, UseClass, UseIdx);
12650b57cec5SDimitry Andric }
12660b57cec5SDimitry Andric
12670b57cec5SDimitry Andric /// If we can determine the operand latency from the def only, without itinerary
12680b57cec5SDimitry Andric /// lookup, do so. Otherwise return -1.
computeDefOperandLatency(const InstrItineraryData * ItinData,const MachineInstr & DefMI) const12690b57cec5SDimitry Andric int TargetInstrInfo::computeDefOperandLatency(
12700b57cec5SDimitry Andric const InstrItineraryData *ItinData, const MachineInstr &DefMI) const {
12710b57cec5SDimitry Andric
12720b57cec5SDimitry Andric // Let the target hook getInstrLatency handle missing itineraries.
12730b57cec5SDimitry Andric if (!ItinData)
12740b57cec5SDimitry Andric return getInstrLatency(ItinData, DefMI);
12750b57cec5SDimitry Andric
12760b57cec5SDimitry Andric if(ItinData->isEmpty())
12770b57cec5SDimitry Andric return defaultDefLatency(ItinData->SchedModel, DefMI);
12780b57cec5SDimitry Andric
12790b57cec5SDimitry Andric // ...operand lookup required
12800b57cec5SDimitry Andric return -1;
12810b57cec5SDimitry Andric }
12820b57cec5SDimitry Andric
getRegSequenceInputs(const MachineInstr & MI,unsigned DefIdx,SmallVectorImpl<RegSubRegPairAndIdx> & InputRegs) const12830b57cec5SDimitry Andric bool TargetInstrInfo::getRegSequenceInputs(
12840b57cec5SDimitry Andric const MachineInstr &MI, unsigned DefIdx,
12850b57cec5SDimitry Andric SmallVectorImpl<RegSubRegPairAndIdx> &InputRegs) const {
12860b57cec5SDimitry Andric assert((MI.isRegSequence() ||
12870b57cec5SDimitry Andric MI.isRegSequenceLike()) && "Instruction do not have the proper type");
12880b57cec5SDimitry Andric
12890b57cec5SDimitry Andric if (!MI.isRegSequence())
12900b57cec5SDimitry Andric return getRegSequenceLikeInputs(MI, DefIdx, InputRegs);
12910b57cec5SDimitry Andric
12920b57cec5SDimitry Andric // We are looking at:
12930b57cec5SDimitry Andric // Def = REG_SEQUENCE v0, sub0, v1, sub1, ...
12940b57cec5SDimitry Andric assert(DefIdx == 0 && "REG_SEQUENCE only has one def");
12950b57cec5SDimitry Andric for (unsigned OpIdx = 1, EndOpIdx = MI.getNumOperands(); OpIdx != EndOpIdx;
12960b57cec5SDimitry Andric OpIdx += 2) {
12970b57cec5SDimitry Andric const MachineOperand &MOReg = MI.getOperand(OpIdx);
12980b57cec5SDimitry Andric if (MOReg.isUndef())
12990b57cec5SDimitry Andric continue;
13000b57cec5SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(OpIdx + 1);
13010b57cec5SDimitry Andric assert(MOSubIdx.isImm() &&
13020b57cec5SDimitry Andric "One of the subindex of the reg_sequence is not an immediate");
13030b57cec5SDimitry Andric // Record Reg:SubReg, SubIdx.
13040b57cec5SDimitry Andric InputRegs.push_back(RegSubRegPairAndIdx(MOReg.getReg(), MOReg.getSubReg(),
13050b57cec5SDimitry Andric (unsigned)MOSubIdx.getImm()));
13060b57cec5SDimitry Andric }
13070b57cec5SDimitry Andric return true;
13080b57cec5SDimitry Andric }
13090b57cec5SDimitry Andric
getExtractSubregInputs(const MachineInstr & MI,unsigned DefIdx,RegSubRegPairAndIdx & InputReg) const13100b57cec5SDimitry Andric bool TargetInstrInfo::getExtractSubregInputs(
13110b57cec5SDimitry Andric const MachineInstr &MI, unsigned DefIdx,
13120b57cec5SDimitry Andric RegSubRegPairAndIdx &InputReg) const {
13130b57cec5SDimitry Andric assert((MI.isExtractSubreg() ||
13140b57cec5SDimitry Andric MI.isExtractSubregLike()) && "Instruction do not have the proper type");
13150b57cec5SDimitry Andric
13160b57cec5SDimitry Andric if (!MI.isExtractSubreg())
13170b57cec5SDimitry Andric return getExtractSubregLikeInputs(MI, DefIdx, InputReg);
13180b57cec5SDimitry Andric
13190b57cec5SDimitry Andric // We are looking at:
13200b57cec5SDimitry Andric // Def = EXTRACT_SUBREG v0.sub1, sub0.
13210b57cec5SDimitry Andric assert(DefIdx == 0 && "EXTRACT_SUBREG only has one def");
13220b57cec5SDimitry Andric const MachineOperand &MOReg = MI.getOperand(1);
13230b57cec5SDimitry Andric if (MOReg.isUndef())
13240b57cec5SDimitry Andric return false;
13250b57cec5SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(2);
13260b57cec5SDimitry Andric assert(MOSubIdx.isImm() &&
13270b57cec5SDimitry Andric "The subindex of the extract_subreg is not an immediate");
13280b57cec5SDimitry Andric
13290b57cec5SDimitry Andric InputReg.Reg = MOReg.getReg();
13300b57cec5SDimitry Andric InputReg.SubReg = MOReg.getSubReg();
13310b57cec5SDimitry Andric InputReg.SubIdx = (unsigned)MOSubIdx.getImm();
13320b57cec5SDimitry Andric return true;
13330b57cec5SDimitry Andric }
13340b57cec5SDimitry Andric
getInsertSubregInputs(const MachineInstr & MI,unsigned DefIdx,RegSubRegPair & BaseReg,RegSubRegPairAndIdx & InsertedReg) const13350b57cec5SDimitry Andric bool TargetInstrInfo::getInsertSubregInputs(
13360b57cec5SDimitry Andric const MachineInstr &MI, unsigned DefIdx,
13370b57cec5SDimitry Andric RegSubRegPair &BaseReg, RegSubRegPairAndIdx &InsertedReg) const {
13380b57cec5SDimitry Andric assert((MI.isInsertSubreg() ||
13390b57cec5SDimitry Andric MI.isInsertSubregLike()) && "Instruction do not have the proper type");
13400b57cec5SDimitry Andric
13410b57cec5SDimitry Andric if (!MI.isInsertSubreg())
13420b57cec5SDimitry Andric return getInsertSubregLikeInputs(MI, DefIdx, BaseReg, InsertedReg);
13430b57cec5SDimitry Andric
13440b57cec5SDimitry Andric // We are looking at:
13450b57cec5SDimitry Andric // Def = INSERT_SEQUENCE v0, v1, sub0.
13460b57cec5SDimitry Andric assert(DefIdx == 0 && "INSERT_SUBREG only has one def");
13470b57cec5SDimitry Andric const MachineOperand &MOBaseReg = MI.getOperand(1);
13480b57cec5SDimitry Andric const MachineOperand &MOInsertedReg = MI.getOperand(2);
13490b57cec5SDimitry Andric if (MOInsertedReg.isUndef())
13500b57cec5SDimitry Andric return false;
13510b57cec5SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(3);
13520b57cec5SDimitry Andric assert(MOSubIdx.isImm() &&
13530b57cec5SDimitry Andric "One of the subindex of the reg_sequence is not an immediate");
13540b57cec5SDimitry Andric BaseReg.Reg = MOBaseReg.getReg();
13550b57cec5SDimitry Andric BaseReg.SubReg = MOBaseReg.getSubReg();
13560b57cec5SDimitry Andric
13570b57cec5SDimitry Andric InsertedReg.Reg = MOInsertedReg.getReg();
13580b57cec5SDimitry Andric InsertedReg.SubReg = MOInsertedReg.getSubReg();
13590b57cec5SDimitry Andric InsertedReg.SubIdx = (unsigned)MOSubIdx.getImm();
13600b57cec5SDimitry Andric return true;
13610b57cec5SDimitry Andric }
13628bcb0991SDimitry Andric
13635ffd83dbSDimitry Andric // Returns a MIRPrinter comment for this machine operand.
createMIROperandComment(const MachineInstr & MI,const MachineOperand & Op,unsigned OpIdx,const TargetRegisterInfo * TRI) const13645ffd83dbSDimitry Andric std::string TargetInstrInfo::createMIROperandComment(
13655ffd83dbSDimitry Andric const MachineInstr &MI, const MachineOperand &Op, unsigned OpIdx,
13665ffd83dbSDimitry Andric const TargetRegisterInfo *TRI) const {
13675ffd83dbSDimitry Andric
13685ffd83dbSDimitry Andric if (!MI.isInlineAsm())
13695ffd83dbSDimitry Andric return "";
13705ffd83dbSDimitry Andric
13715ffd83dbSDimitry Andric std::string Flags;
13725ffd83dbSDimitry Andric raw_string_ostream OS(Flags);
13735ffd83dbSDimitry Andric
13745ffd83dbSDimitry Andric if (OpIdx == InlineAsm::MIOp_ExtraInfo) {
13755ffd83dbSDimitry Andric // Print HasSideEffects, MayLoad, MayStore, IsAlignStack
13765ffd83dbSDimitry Andric unsigned ExtraInfo = Op.getImm();
13775ffd83dbSDimitry Andric bool First = true;
13785ffd83dbSDimitry Andric for (StringRef Info : InlineAsm::getExtraInfoNames(ExtraInfo)) {
13795ffd83dbSDimitry Andric if (!First)
13805ffd83dbSDimitry Andric OS << " ";
13815ffd83dbSDimitry Andric First = false;
13825ffd83dbSDimitry Andric OS << Info;
13835ffd83dbSDimitry Andric }
13845ffd83dbSDimitry Andric
13855ffd83dbSDimitry Andric return OS.str();
13865ffd83dbSDimitry Andric }
13875ffd83dbSDimitry Andric
13885ffd83dbSDimitry Andric int FlagIdx = MI.findInlineAsmFlagIdx(OpIdx);
13895ffd83dbSDimitry Andric if (FlagIdx < 0 || (unsigned)FlagIdx != OpIdx)
13905ffd83dbSDimitry Andric return "";
13915ffd83dbSDimitry Andric
13925ffd83dbSDimitry Andric assert(Op.isImm() && "Expected flag operand to be an immediate");
13935ffd83dbSDimitry Andric // Pretty print the inline asm operand descriptor.
13945ffd83dbSDimitry Andric unsigned Flag = Op.getImm();
13955ffd83dbSDimitry Andric unsigned Kind = InlineAsm::getKind(Flag);
13965ffd83dbSDimitry Andric OS << InlineAsm::getKindName(Kind);
13975ffd83dbSDimitry Andric
13985ffd83dbSDimitry Andric unsigned RCID = 0;
13995ffd83dbSDimitry Andric if (!InlineAsm::isImmKind(Flag) && !InlineAsm::isMemKind(Flag) &&
14005ffd83dbSDimitry Andric InlineAsm::hasRegClassConstraint(Flag, RCID)) {
14015ffd83dbSDimitry Andric if (TRI) {
14025ffd83dbSDimitry Andric OS << ':' << TRI->getRegClassName(TRI->getRegClass(RCID));
14035ffd83dbSDimitry Andric } else
14045ffd83dbSDimitry Andric OS << ":RC" << RCID;
14055ffd83dbSDimitry Andric }
14065ffd83dbSDimitry Andric
14075ffd83dbSDimitry Andric if (InlineAsm::isMemKind(Flag)) {
14085ffd83dbSDimitry Andric unsigned MCID = InlineAsm::getMemoryConstraintID(Flag);
14095ffd83dbSDimitry Andric OS << ":" << InlineAsm::getMemConstraintName(MCID);
14105ffd83dbSDimitry Andric }
14115ffd83dbSDimitry Andric
14125ffd83dbSDimitry Andric unsigned TiedTo = 0;
14135ffd83dbSDimitry Andric if (InlineAsm::isUseOperandTiedToDef(Flag, TiedTo))
14145ffd83dbSDimitry Andric OS << " tiedto:$" << TiedTo;
14155ffd83dbSDimitry Andric
14165ffd83dbSDimitry Andric return OS.str();
14175ffd83dbSDimitry Andric }
14185ffd83dbSDimitry Andric
~PipelinerLoopInfo()14198bcb0991SDimitry Andric TargetInstrInfo::PipelinerLoopInfo::~PipelinerLoopInfo() {}
1420