1*9880d681SAndroid Build Coastguard Worker //===-- SIFoldOperands.cpp - Fold operands --- ----------------------------===//
2*9880d681SAndroid Build Coastguard Worker //
3*9880d681SAndroid Build Coastguard Worker // The LLVM Compiler Infrastructure
4*9880d681SAndroid Build Coastguard Worker //
5*9880d681SAndroid Build Coastguard Worker // This file is distributed under the University of Illinois Open Source
6*9880d681SAndroid Build Coastguard Worker // License. See LICENSE.TXT for details.
7*9880d681SAndroid Build Coastguard Worker //
8*9880d681SAndroid Build Coastguard Worker /// \file
9*9880d681SAndroid Build Coastguard Worker //===----------------------------------------------------------------------===//
10*9880d681SAndroid Build Coastguard Worker //
11*9880d681SAndroid Build Coastguard Worker
12*9880d681SAndroid Build Coastguard Worker #include "AMDGPU.h"
13*9880d681SAndroid Build Coastguard Worker #include "AMDGPUSubtarget.h"
14*9880d681SAndroid Build Coastguard Worker #include "SIInstrInfo.h"
15*9880d681SAndroid Build Coastguard Worker #include "llvm/CodeGen/LiveIntervalAnalysis.h"
16*9880d681SAndroid Build Coastguard Worker #include "llvm/CodeGen/MachineFunctionPass.h"
17*9880d681SAndroid Build Coastguard Worker #include "llvm/CodeGen/MachineInstrBuilder.h"
18*9880d681SAndroid Build Coastguard Worker #include "llvm/CodeGen/MachineRegisterInfo.h"
19*9880d681SAndroid Build Coastguard Worker #include "llvm/Support/Debug.h"
20*9880d681SAndroid Build Coastguard Worker #include "llvm/Support/raw_ostream.h"
21*9880d681SAndroid Build Coastguard Worker #include "llvm/Target/TargetMachine.h"
22*9880d681SAndroid Build Coastguard Worker
23*9880d681SAndroid Build Coastguard Worker #define DEBUG_TYPE "si-fold-operands"
24*9880d681SAndroid Build Coastguard Worker using namespace llvm;
25*9880d681SAndroid Build Coastguard Worker
26*9880d681SAndroid Build Coastguard Worker namespace {
27*9880d681SAndroid Build Coastguard Worker
28*9880d681SAndroid Build Coastguard Worker class SIFoldOperands : public MachineFunctionPass {
29*9880d681SAndroid Build Coastguard Worker public:
30*9880d681SAndroid Build Coastguard Worker static char ID;
31*9880d681SAndroid Build Coastguard Worker
32*9880d681SAndroid Build Coastguard Worker public:
SIFoldOperands()33*9880d681SAndroid Build Coastguard Worker SIFoldOperands() : MachineFunctionPass(ID) {
34*9880d681SAndroid Build Coastguard Worker initializeSIFoldOperandsPass(*PassRegistry::getPassRegistry());
35*9880d681SAndroid Build Coastguard Worker }
36*9880d681SAndroid Build Coastguard Worker
37*9880d681SAndroid Build Coastguard Worker bool runOnMachineFunction(MachineFunction &MF) override;
38*9880d681SAndroid Build Coastguard Worker
getPassName() const39*9880d681SAndroid Build Coastguard Worker const char *getPassName() const override {
40*9880d681SAndroid Build Coastguard Worker return "SI Fold Operands";
41*9880d681SAndroid Build Coastguard Worker }
42*9880d681SAndroid Build Coastguard Worker
getAnalysisUsage(AnalysisUsage & AU) const43*9880d681SAndroid Build Coastguard Worker void getAnalysisUsage(AnalysisUsage &AU) const override {
44*9880d681SAndroid Build Coastguard Worker AU.setPreservesCFG();
45*9880d681SAndroid Build Coastguard Worker MachineFunctionPass::getAnalysisUsage(AU);
46*9880d681SAndroid Build Coastguard Worker }
47*9880d681SAndroid Build Coastguard Worker };
48*9880d681SAndroid Build Coastguard Worker
49*9880d681SAndroid Build Coastguard Worker struct FoldCandidate {
50*9880d681SAndroid Build Coastguard Worker MachineInstr *UseMI;
51*9880d681SAndroid Build Coastguard Worker unsigned UseOpNo;
52*9880d681SAndroid Build Coastguard Worker MachineOperand *OpToFold;
53*9880d681SAndroid Build Coastguard Worker uint64_t ImmToFold;
54*9880d681SAndroid Build Coastguard Worker
FoldCandidate__anon7de84b080111::FoldCandidate55*9880d681SAndroid Build Coastguard Worker FoldCandidate(MachineInstr *MI, unsigned OpNo, MachineOperand *FoldOp) :
56*9880d681SAndroid Build Coastguard Worker UseMI(MI), UseOpNo(OpNo) {
57*9880d681SAndroid Build Coastguard Worker
58*9880d681SAndroid Build Coastguard Worker if (FoldOp->isImm()) {
59*9880d681SAndroid Build Coastguard Worker OpToFold = nullptr;
60*9880d681SAndroid Build Coastguard Worker ImmToFold = FoldOp->getImm();
61*9880d681SAndroid Build Coastguard Worker } else {
62*9880d681SAndroid Build Coastguard Worker assert(FoldOp->isReg());
63*9880d681SAndroid Build Coastguard Worker OpToFold = FoldOp;
64*9880d681SAndroid Build Coastguard Worker }
65*9880d681SAndroid Build Coastguard Worker }
66*9880d681SAndroid Build Coastguard Worker
isImm__anon7de84b080111::FoldCandidate67*9880d681SAndroid Build Coastguard Worker bool isImm() const {
68*9880d681SAndroid Build Coastguard Worker return !OpToFold;
69*9880d681SAndroid Build Coastguard Worker }
70*9880d681SAndroid Build Coastguard Worker };
71*9880d681SAndroid Build Coastguard Worker
72*9880d681SAndroid Build Coastguard Worker } // End anonymous namespace.
73*9880d681SAndroid Build Coastguard Worker
// Register the pass with the LLVM pass registry under DEBUG_TYPE
// ("si-fold-operands"); it is neither CFG-only nor an analysis.
INITIALIZE_PASS(SIFoldOperands, DEBUG_TYPE,
                "SI Fold Operands", false, false)

// Pass identification token (its address, not value, identifies the pass).
char SIFoldOperands::ID = 0;

// Exported handle used by the target to schedule this pass by ID.
char &llvm::SIFoldOperandsID = SIFoldOperands::ID;
80*9880d681SAndroid Build Coastguard Worker
createSIFoldOperandsPass()81*9880d681SAndroid Build Coastguard Worker FunctionPass *llvm::createSIFoldOperandsPass() {
82*9880d681SAndroid Build Coastguard Worker return new SIFoldOperands();
83*9880d681SAndroid Build Coastguard Worker }
84*9880d681SAndroid Build Coastguard Worker
isSafeToFold(unsigned Opcode)85*9880d681SAndroid Build Coastguard Worker static bool isSafeToFold(unsigned Opcode) {
86*9880d681SAndroid Build Coastguard Worker switch(Opcode) {
87*9880d681SAndroid Build Coastguard Worker case AMDGPU::V_MOV_B32_e32:
88*9880d681SAndroid Build Coastguard Worker case AMDGPU::V_MOV_B32_e64:
89*9880d681SAndroid Build Coastguard Worker case AMDGPU::V_MOV_B64_PSEUDO:
90*9880d681SAndroid Build Coastguard Worker case AMDGPU::S_MOV_B32:
91*9880d681SAndroid Build Coastguard Worker case AMDGPU::S_MOV_B64:
92*9880d681SAndroid Build Coastguard Worker case AMDGPU::COPY:
93*9880d681SAndroid Build Coastguard Worker return true;
94*9880d681SAndroid Build Coastguard Worker default:
95*9880d681SAndroid Build Coastguard Worker return false;
96*9880d681SAndroid Build Coastguard Worker }
97*9880d681SAndroid Build Coastguard Worker }
98*9880d681SAndroid Build Coastguard Worker
updateOperand(FoldCandidate & Fold,const TargetRegisterInfo & TRI)99*9880d681SAndroid Build Coastguard Worker static bool updateOperand(FoldCandidate &Fold,
100*9880d681SAndroid Build Coastguard Worker const TargetRegisterInfo &TRI) {
101*9880d681SAndroid Build Coastguard Worker MachineInstr *MI = Fold.UseMI;
102*9880d681SAndroid Build Coastguard Worker MachineOperand &Old = MI->getOperand(Fold.UseOpNo);
103*9880d681SAndroid Build Coastguard Worker assert(Old.isReg());
104*9880d681SAndroid Build Coastguard Worker
105*9880d681SAndroid Build Coastguard Worker if (Fold.isImm()) {
106*9880d681SAndroid Build Coastguard Worker Old.ChangeToImmediate(Fold.ImmToFold);
107*9880d681SAndroid Build Coastguard Worker return true;
108*9880d681SAndroid Build Coastguard Worker }
109*9880d681SAndroid Build Coastguard Worker
110*9880d681SAndroid Build Coastguard Worker MachineOperand *New = Fold.OpToFold;
111*9880d681SAndroid Build Coastguard Worker if (TargetRegisterInfo::isVirtualRegister(Old.getReg()) &&
112*9880d681SAndroid Build Coastguard Worker TargetRegisterInfo::isVirtualRegister(New->getReg())) {
113*9880d681SAndroid Build Coastguard Worker Old.substVirtReg(New->getReg(), New->getSubReg(), TRI);
114*9880d681SAndroid Build Coastguard Worker return true;
115*9880d681SAndroid Build Coastguard Worker }
116*9880d681SAndroid Build Coastguard Worker
117*9880d681SAndroid Build Coastguard Worker // FIXME: Handle physical registers.
118*9880d681SAndroid Build Coastguard Worker
119*9880d681SAndroid Build Coastguard Worker return false;
120*9880d681SAndroid Build Coastguard Worker }
121*9880d681SAndroid Build Coastguard Worker
isUseMIInFoldList(const std::vector<FoldCandidate> & FoldList,const MachineInstr * MI)122*9880d681SAndroid Build Coastguard Worker static bool isUseMIInFoldList(const std::vector<FoldCandidate> &FoldList,
123*9880d681SAndroid Build Coastguard Worker const MachineInstr *MI) {
124*9880d681SAndroid Build Coastguard Worker for (auto Candidate : FoldList) {
125*9880d681SAndroid Build Coastguard Worker if (Candidate.UseMI == MI)
126*9880d681SAndroid Build Coastguard Worker return true;
127*9880d681SAndroid Build Coastguard Worker }
128*9880d681SAndroid Build Coastguard Worker return false;
129*9880d681SAndroid Build Coastguard Worker }
130*9880d681SAndroid Build Coastguard Worker
/// Try to queue a fold of \p OpToFold into operand \p OpNo of \p MI.
/// If the operand is not directly legal, first try rewriting v_mac_f32 to
/// v_mad_f32, then try commuting MI. Returns true and appends to
/// \p FoldList on success; on failure MI is restored to its original
/// descriptor but may have been commuted (commutation is not undone).
static bool tryAddToFoldList(std::vector<FoldCandidate> &FoldList,
                             MachineInstr *MI, unsigned OpNo,
                             MachineOperand *OpToFold,
                             const SIInstrInfo *TII) {
  if (!TII->isOperandLegal(*MI, OpNo, OpToFold)) {

    // Special case for v_mac_f32_e64 if we are trying to fold into src2
    unsigned Opc = MI->getOpcode();
    if (Opc == AMDGPU::V_MAC_F32_e64 &&
        (int)OpNo == AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::src2)) {
      // Check if changing this to a v_mad_f32 instruction will allow us to
      // fold the operand.
      // Mutate the descriptor first, then recurse so the legality check
      // below runs against the MAD form.
      MI->setDesc(TII->get(AMDGPU::V_MAD_F32));
      bool FoldAsMAD = tryAddToFoldList(FoldList, MI, OpNo, OpToFold, TII);
      if (FoldAsMAD) {
        // src2 of v_mac is tied to the dst; v_mad's src2 is not.
        MI->untieRegOperand(OpNo);
        return true;
      }
      // Recursion failed: restore the original MAC descriptor.
      MI->setDesc(TII->get(Opc));
    }

    // If we are already folding into another operand of MI, then
    // we can't commute the instruction, otherwise we risk making the
    // other fold illegal.
    if (isUseMIInFoldList(FoldList, MI))
      return false;

    // Operand is not legal, so try to commute the instruction to
    // see if this makes it possible to fold.
    unsigned CommuteIdx0 = TargetInstrInfo::CommuteAnyOperandIndex;
    unsigned CommuteIdx1 = TargetInstrInfo::CommuteAnyOperandIndex;
    bool CanCommute = TII->findCommutedOpIndices(*MI, CommuteIdx0, CommuteIdx1);

    if (CanCommute) {
      // Track where OpNo lands after the commute swaps the two operands.
      if (CommuteIdx0 == OpNo)
        OpNo = CommuteIdx1;
      else if (CommuteIdx1 == OpNo)
        OpNo = CommuteIdx0;
    }

    // One of operands might be an Imm operand, and OpNo may refer to it after
    // the call of commuteInstruction() below. Such situations are avoided
    // here explicitly as OpNo must be a register operand to be a candidate
    // for memory folding.
    if (CanCommute && (!MI->getOperand(CommuteIdx0).isReg() ||
                       !MI->getOperand(CommuteIdx1).isReg()))
      return false;

    if (!CanCommute ||
        !TII->commuteInstruction(*MI, false, CommuteIdx0, CommuteIdx1))
      return false;

    // Re-check legality at the (possibly remapped) operand index.
    if (!TII->isOperandLegal(*MI, OpNo, OpToFold))
      return false;
  }

  FoldList.push_back(FoldCandidate(MI, OpNo, OpToFold));
  return true;
}
190*9880d681SAndroid Build Coastguard Worker
/// Attempt to fold \p OpToFold (an immediate or register defined by a
/// mov/copy) into operand \p UseOpIdx of \p UseMI. Successful folds are
/// queued on \p FoldList rather than applied immediately; COPYs rewritten
/// into movs are recorded in \p CopiesToReplace so the caller can add
/// their implicit operands after iteration finishes.
static void foldOperand(MachineOperand &OpToFold, MachineInstr *UseMI,
                        unsigned UseOpIdx,
                        std::vector<FoldCandidate> &FoldList,
                        SmallVectorImpl<MachineInstr *> &CopiesToReplace,
                        const SIInstrInfo *TII, const SIRegisterInfo &TRI,
                        MachineRegisterInfo &MRI) {
  const MachineOperand &UseOp = UseMI->getOperand(UseOpIdx);

  // FIXME: Fold operands with subregs.
  // Bail on register folds into subreg uses, and on implicit uses.
  if (UseOp.isReg() && ((UseOp.getSubReg() && OpToFold.isReg()) ||
      UseOp.isImplicit())) {
    return;
  }

  bool FoldingImm = OpToFold.isImm();
  APInt Imm;

  if (FoldingImm) {
    unsigned UseReg = UseOp.getReg();
    const TargetRegisterClass *UseRC
      = TargetRegisterInfo::isVirtualRegister(UseReg) ?
      MRI.getRegClass(UseReg) :
      TRI.getPhysRegClass(UseReg);

    // Widen to 64 bits so sub0/sub1 slicing below is uniform.
    Imm = APInt(64, OpToFold.getImm());

    // Register class of the def of the instruction being folded, used to
    // detect 64-bit sources.
    const MCInstrDesc &FoldDesc = TII->get(OpToFold.getParent()->getOpcode());
    const TargetRegisterClass *FoldRC =
      TRI.getRegClass(FoldDesc.OpInfo[0].RegClass);

    // Split 64-bit constants into 32-bits for folding.
    if (FoldRC->getSize() == 8 && UseOp.getSubReg()) {
      if (UseRC->getSize() != 8)
        return;

      if (UseOp.getSubReg() == AMDGPU::sub0) {
        Imm = Imm.getLoBits(32);
      } else {
        assert(UseOp.getSubReg() == AMDGPU::sub1);
        Imm = Imm.getHiBits(32);
      }
    }

    // In order to fold immediates into copies, we need to change the
    // copy to a MOV.
    if (UseMI->getOpcode() == AMDGPU::COPY) {
      unsigned DestReg = UseMI->getOperand(0).getReg();
      const TargetRegisterClass *DestRC
        = TargetRegisterInfo::isVirtualRegister(DestReg) ?
        MRI.getRegClass(DestReg) :
        TRI.getPhysRegClass(DestReg);

      unsigned MovOp = TII->getMovOpcode(DestRC);
      if (MovOp == AMDGPU::COPY)
        return;

      // NOTE: the new mov's implicit operands (e.g. EXEC use) are added
      // later by the caller, after use-iteration is done.
      UseMI->setDesc(TII->get(MovOp));
      CopiesToReplace.push_back(UseMI);
    }
  }

  // Special case for REG_SEQUENCE: We can't fold literals into
  // REG_SEQUENCE instructions, so we have to fold them into the
  // uses of REG_SEQUENCE.
  if (UseMI->getOpcode() == AMDGPU::REG_SEQUENCE) {
    unsigned RegSeqDstReg = UseMI->getOperand(0).getReg();
    // Operand UseOpIdx+1 holds the subreg index paired with this source.
    unsigned RegSeqDstSubReg = UseMI->getOperand(UseOpIdx + 1).getImm();

    for (MachineRegisterInfo::use_iterator
         RSUse = MRI.use_begin(RegSeqDstReg),
         RSE = MRI.use_end(); RSUse != RSE; ++RSUse) {

      MachineInstr *RSUseMI = RSUse->getParent();
      // Only recurse into uses that read the lane this source defines.
      if (RSUse->getSubReg() != RegSeqDstSubReg)
        continue;

      foldOperand(OpToFold, RSUseMI, RSUse.getOperandNo(), FoldList,
                  CopiesToReplace, TII, TRI, MRI);
    }
    return;
  }

  const MCInstrDesc &UseDesc = UseMI->getDesc();

  // Don't fold into target independent nodes. Target independent opcodes
  // don't have defined register classes.
  if (UseDesc.isVariadic() ||
      UseDesc.OpInfo[UseOpIdx].RegClass == -1)
    return;

  if (FoldingImm) {
    MachineOperand ImmOp = MachineOperand::CreateImm(Imm.getSExtValue());
    tryAddToFoldList(FoldList, UseMI, UseOpIdx, &ImmOp, TII);
    return;
  }

  tryAddToFoldList(FoldList, UseMI, UseOpIdx, &OpToFold, TII);

  // FIXME: We could try to change the instruction from 64-bit to 32-bit
  // to enable more folding opportunites. The shrink operands pass
  // already does this.
  return;
}
294*9880d681SAndroid Build Coastguard Worker
/// Walk every mov/copy-like instruction in the function and try to fold its
/// source operand into all users of its destination register. Returns false:
/// operand rewrites do not change the pass-manager-visible structure.
bool SIFoldOperands::runOnMachineFunction(MachineFunction &MF) {
  if (skipFunction(*MF.getFunction()))
    return false;

  const SISubtarget &ST = MF.getSubtarget<SISubtarget>();

  MachineRegisterInfo &MRI = MF.getRegInfo();
  const SIInstrInfo *TII = ST.getInstrInfo();
  const SIRegisterInfo &TRI = TII->getRegisterInfo();

  for (MachineFunction::iterator BI = MF.begin(), BE = MF.end();
                                                  BI != BE; ++BI) {

    MachineBasicBlock &MBB = *BI;
    MachineBasicBlock::iterator I, Next;
    // Capture Next before processing: folding may mutate the current
    // instruction's operands/descriptor.
    for (I = MBB.begin(); I != MBB.end(); I = Next) {
      Next = std::next(I);
      MachineInstr &MI = *I;

      if (!isSafeToFold(MI.getOpcode()))
        continue;

      // Operand 1 is the single source of every opcode isSafeToFold accepts.
      unsigned OpSize = TII->getOpSize(MI, 1);
      MachineOperand &OpToFold = MI.getOperand(1);
      bool FoldingImm = OpToFold.isImm();

      // FIXME: We could also be folding things like FrameIndexes and
      // TargetIndexes.
      if (!FoldingImm && !OpToFold.isReg())
        continue;

      // Folding immediates with more than one use will increase program size.
      // FIXME: This will also reduce register usage, which may be better
      // in some cases. A better heuristic is needed.
      if (FoldingImm && !TII->isInlineConstant(OpToFold, OpSize) &&
          !MRI.hasOneUse(MI.getOperand(0).getReg()))
        continue;

      // Source must be a virtual register (physical sources can be
      // redefined between here and the use).
      if (OpToFold.isReg() &&
          !TargetRegisterInfo::isVirtualRegister(OpToFold.getReg()))
        continue;

      // Prevent folding operands backwards in the function. For example,
      // the COPY opcode must not be replaced by 1 in this example:
      //
      //    %vreg3<def> = COPY %VGPR0; VGPR_32:%vreg3
      //    ...
      //    %VGPR0<def> = V_MOV_B32_e32 1, %EXEC<imp-use>
      MachineOperand &Dst = MI.getOperand(0);
      if (Dst.isReg() &&
          !TargetRegisterInfo::isVirtualRegister(Dst.getReg()))
        continue;

      // We need mutate the operands of new mov instructions to add implicit
      // uses of EXEC, but adding them invalidates the use_iterator, so defer
      // this.
      SmallVector<MachineInstr *, 4> CopiesToReplace;

      // Collect candidates first, apply them after iteration: updateOperand
      // would invalidate the use_iterator below.
      std::vector<FoldCandidate> FoldList;
      for (MachineRegisterInfo::use_iterator
           Use = MRI.use_begin(MI.getOperand(0).getReg()), E = MRI.use_end();
           Use != E; ++Use) {

        MachineInstr *UseMI = Use->getParent();

        foldOperand(OpToFold, UseMI, Use.getOperandNo(), FoldList,
                    CopiesToReplace, TII, TRI, MRI);
      }

      // Make sure we add EXEC uses to any new v_mov instructions created.
      for (MachineInstr *Copy : CopiesToReplace)
        Copy->addImplicitDefUseOperands(MF);

      for (FoldCandidate &Fold : FoldList) {
        if (updateOperand(Fold, TRI)) {
          // Clear kill flags.
          if (!Fold.isImm()) {
            assert(Fold.OpToFold && Fold.OpToFold->isReg());
            // FIXME: Probably shouldn't bother trying to fold if not an
            // SGPR. PeepholeOptimizer can eliminate redundant VGPR->VGPR
            // copies.
            MRI.clearKillFlags(Fold.OpToFold->getReg());
          }
          DEBUG(dbgs() << "Folded source from " << MI << " into OpNo " <<
                Fold.UseOpNo << " of " << *Fold.UseMI << '\n');
        }
      }
    }
  }
  return false;
}
386