1 //===-- llvm/CodeGen/Spiller.cpp - Spiller -------------------------------===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 #define DEBUG_TYPE "spiller"
13 #include "VirtRegMap.h"
14 #include "llvm/CodeGen/LiveIntervalAnalysis.h"
15 #include "llvm/CodeGen/LiveStackAnalysis.h"
16 #include "llvm/CodeGen/MachineFunction.h"
17 #include "llvm/CodeGen/MachineRegisterInfo.h"
18 #include "llvm/CodeGen/MachineFrameInfo.h"
19 #include "llvm/Target/TargetMachine.h"
20 #include "llvm/Target/TargetInstrInfo.h"
21 #include "llvm/Support/Debug.h"
// Out-of-line empty destructor for the abstract Spiller interface;
// presumably serves as the vtable anchor for the class — confirm against
// the Spiller declaration (not visible in this listing).
25 Spiller::~Spiller() {}
29 /// Utility class for spillers.
// Holds the per-function analyses every concrete spiller needs (frame info,
// register info, target instruction info) and the shared insertion helpers.
// NOTE(review): this listing is incomplete — the declarations of the mf/lis/
// ls/vrm members referenced by the initializer list below, and part of the
// constructor signature (the VirtRegMap parameter), fall on missing lines.
30 class SpillerBase : public Spiller {
36 MachineFrameInfo *mfi;
37 MachineRegisterInfo *mri;
38 const TargetInstrInfo *tii;
41 /// Construct a spiller base.
// Caches the function's frame info, register info and target instruction
// info so the helper methods below don't re-fetch them per call.
42 SpillerBase(MachineFunction *mf, LiveIntervals *lis, LiveStacks *ls,
44 mf(mf), lis(lis), ls(ls), vrm(vrm)
46 mfi = mf->getFrameInfo();
47 mri = &mf->getRegInfo();
48 tii = mf->getTarget().getInstrInfo();
51 /// Ensures there is space before the given machine instruction, returns the
52 /// instruction's new number.
// NOTE(review): incomplete in this listing — the closing braces and the
// final return (presumably "return miIdx;") are on missing lines.
53 unsigned makeSpaceBefore(MachineInstr *mi) {
// If there is no free numbering slot immediately before 'mi', double every
// instruction index in both the live-interval and live-stack analyses so
// a gap opens up.
54 if (!lis->hasGapBeforeInstr(lis->getInstructionIndex(mi))) {
55 lis->scaleNumbering(2);
56 ls->scaleNumbering(2);
// Re-query the index: scaleNumbering(2) above invalidates the old one.
59 unsigned miIdx = lis->getInstructionIndex(mi);
61 assert(lis->hasGapBeforeInstr(miIdx));
66 /// Ensure there is space after the given machine instruction, returns the
67 /// instruction's new number.
// Mirror image of makeSpaceBefore. NOTE(review): incomplete in this
// listing — closing braces and the final return are on missing lines.
68 unsigned makeSpaceAfter(MachineInstr *mi) {
// No free slot after 'mi': rescale all indices in both analyses by 2 to
// create gaps between consecutive instructions.
69 if (!lis->hasGapAfterInstr(lis->getInstructionIndex(mi))) {
70 lis->scaleNumbering(2);
71 ls->scaleNumbering(2);
// Index must be re-read after the rescale above.
74 unsigned miIdx = lis->getInstructionIndex(mi);
76 assert(lis->hasGapAfterInstr(miIdx));
82 /// Insert a store of the given vreg to the given stack slot immediately
83 /// after the given instruction. Returns the base index of the inserted
84 /// instruction. The caller is responsible for adding an appropriate
85 /// LiveInterval to the LiveIntervals analysis.
// NOTE(review): incomplete in this listing — the vreg parameter, the
// trailing arguments of the storeRegToStackSlot call, the advancement of
// the iterators past 'mi', and the final "return storeInstIdx;" appear to
// be on missing lines.
86 unsigned insertStoreFor(MachineInstr *mi, unsigned ss,
88 const TargetRegisterClass *trc) {
90 MachineBasicBlock::iterator nextInstItr(mi);
// Open a numbering gap after 'mi'; miIdx is mi's (possibly rescaled) index.
93 unsigned miIdx = makeSpaceAfter(mi);
// Emit the target-specific spill store into the block after 'mi'.
95 tii->storeRegToStackSlot(*mi->getParent(), nextInstItr, vreg,
// Presumably advanced past 'mi' to the newly inserted store on a missing
// line — confirm against the full source.
97 MachineBasicBlock::iterator storeInstItr(mi);
99 MachineInstr *storeInst = &*storeInstItr;
// The store is numbered one full instruction-slot group after 'mi'.
100 unsigned storeInstIdx = miIdx + LiveInterval::InstrSlots::NUM;
102 assert(lis->getInstructionFromIndex(storeInstIdx) == 0 &&
103 "Store inst index already in use.");
// Register the new store with LiveIntervals at the reserved index.
105 lis->InsertMachineInstrInMaps(storeInst, storeInstIdx);
// Insert a spill store after 'mi' and extend 'li' with a live range
// covering [def of mi, use slot of the store].
// NOTE(review): incomplete in this listing — the vreg parameter, the
// "VNInfo *vni =" receiving getNextValue's result, and (presumably) the
// final "li->addRange(lr);" are on missing lines.
110 void insertStoreOnInterval(LiveInterval *li,
111 MachineInstr *mi, unsigned ss,
113 const TargetRegisterClass *trc) {
115 unsigned storeInstIdx = insertStoreFor(mi, ss, vreg, trc);
// Range runs from the def slot of 'mi' to the use slot of the new store.
116 unsigned start = lis->getDefIndex(lis->getInstructionIndex(mi)),
117 end = lis->getUseIndex(storeInstIdx);
120 li->getNextValue(storeInstIdx, 0, true, lis->getVNInfoAllocator());
// The value is killed by the store that spills it.
121 vni->kills.push_back(storeInstIdx);
122 LiveRange lr(start, end, vni);
127 /// Insert a load of the given veg from the given stack slot immediately
128 /// before the given instruction. Returns the base index of the inserted
129 /// instruction. The caller is responsible for adding an appropriate
130 /// LiveInterval to the LiveIntervals analysis.
// Mirror image of insertStoreFor. NOTE(review): incomplete in this
// listing — the vreg parameter, the backward step of loadInstItr to the
// newly inserted load, and the final "return loadInstIdx;" appear to be
// on missing lines.
131 unsigned insertLoadFor(MachineInstr *mi, unsigned ss,
133 const TargetRegisterClass *trc) {
134 MachineBasicBlock::iterator useInstItr(mi);
// Open a numbering gap before 'mi'; miIdx is mi's (possibly rescaled) index.
136 unsigned miIdx = makeSpaceBefore(mi);
// Emit the target-specific reload before the using instruction.
138 tii->loadRegFromStackSlot(*mi->getParent(), useInstItr, vreg, ss, trc);
139 MachineBasicBlock::iterator loadInstItr(mi);
141 MachineInstr *loadInst = &*loadInstItr;
// The load is numbered one full instruction-slot group before 'mi'.
142 unsigned loadInstIdx = miIdx - LiveInterval::InstrSlots::NUM;
144 assert(lis->getInstructionFromIndex(loadInstIdx) == 0 &&
145 "Load inst index already in use.");
// Register the new load with LiveIntervals at the reserved index.
147 lis->InsertMachineInstrInMaps(loadInst, loadInstIdx);
// Insert a reload before 'mi' and extend 'li' with a live range covering
// [def of the load, use slot of mi].
// NOTE(review): incomplete in this listing — the vreg parameter, the
// "VNInfo *vni =" receiving getNextValue's result, and (presumably) the
// final "li->addRange(lr);" are on missing lines.
152 void insertLoadOnInterval(LiveInterval *li,
153 MachineInstr *mi, unsigned ss,
155 const TargetRegisterClass *trc) {
157 unsigned loadInstIdx = insertLoadFor(mi, ss, vreg, trc);
// Range runs from the def slot of the new load to the use slot of 'mi'.
158 unsigned start = lis->getDefIndex(loadInstIdx),
159 end = lis->getUseIndex(lis->getInstructionIndex(mi));
162 li->getNextValue(loadInstIdx, 0, true, lis->getVNInfoAllocator());
// The reloaded value dies at the instruction that uses it.
163 vni->kills.push_back(lis->getInstructionIndex(mi));
164 LiveRange lr(start, end, vni);
171 /// Add spill ranges for every use/def of the live interval, inserting loads
172 /// immediately before each use, and stores after each def. No folding is
// NOTE(review): incomplete in this listing — among the missing lines are
// the "do {" opener for the do-while at original line 194, the
// declarations of hasUse/hasDef, the guard conditions around the
// insertLoad/insertStore calls, several closing braces, and the final
// "return added;".
174 std::vector<LiveInterval*> trivialSpillEverywhere(LiveInterval *li) {
175 DOUT << "Spilling everywhere " << *li << "\n";
// An interval with infinite weight is one this spiller itself produced;
// spilling it again would loop forever.
177 assert(li->weight != HUGE_VALF &&
178 "Attempting to spill already spilled value.");
180 assert(!li->isStackSlot() &&
181 "Trying to spill a stack slot.");
// Intervals created for the inserted loads/stores, returned to the caller.
183 std::vector<LiveInterval*> added;
185 const TargetRegisterClass *trc = mri->getRegClass(li->reg);
// One stack slot is shared by all the replacement vregs created below.
186 unsigned ss = vrm->assignVirt2StackSlot(li->reg);
// Walk every use/def operand of the register being spilled. The iterator
// is advanced inside the loop body, not in the for-header.
188 for (MachineRegisterInfo::reg_iterator
189 regItr = mri->reg_begin(li->reg); regItr != mri->reg_end();) {
191 MachineInstr *mi = &*regItr;
// Skip past any further operands of the same instruction so each
// instruction is processed exactly once ("do {" opener is on a missing
// line).
194 } while (regItr != mri->reg_end() && (&*regItr == mi));
// Operand indices within 'mi' that reference li->reg.
196 SmallVector<unsigned, 2> indices;
200 for (unsigned i = 0; i != mi->getNumOperands(); ++i) {
201 MachineOperand &op = mi->getOperand(i);
// Only operands naming the spilled register are rewritten.
203 if (!op.isReg() || op.getReg() != li->reg)
206 hasUse |= mi->getOperand(i).isUse();
207 hasDef |= mi->getOperand(i).isDef();
209 indices.push_back(i);
// Fresh vreg for this instruction, mapped to the shared stack slot.
212 unsigned newVReg = mri->createVirtualRegister(trc);
214 vrm->assignVirt2StackSlot(newVReg, ss);
216 LiveInterval *newLI = &lis->getOrCreateInterval(newVReg);
// Infinite weight marks the tiny new interval as unspillable.
217 newLI->weight = HUGE_VALF;
// Rewrite the matching operands to use the new vreg; uses kill it since
// its live range ends at this instruction.
219 for (unsigned i = 0; i < indices.size(); ++i) {
220 mi->getOperand(indices[i]).setReg(newVReg);
222 if (mi->getOperand(indices[i]).isUse()) {
223 mi->getOperand(indices[i]).setIsKill(true);
// An instruction reached via the reg_iterator must touch the register.
227 assert(hasUse || hasDef);
// Reload before each use, spill after each def (the surrounding guard
// conditions are on missing lines — presumably "if (hasUse)" /
// "if (hasDef)"; confirm against the full source).
230 insertLoadOnInterval(newLI, mi, ss, newVReg, trc);
234 insertStoreOnInterval(newLI, mi, ss, newVReg, trc);
237 added.push_back(newLI);
246 /// Spills any live range using the spill-everywhere method with no attempt at
// Concrete Spiller that simply delegates to SpillerBase's
// trivialSpillEverywhere. NOTE(review): incomplete in this listing —
// access specifiers, the VirtRegMap constructor parameter, closing braces,
// the assignments to storeInsertPoint, and the bodies of the PHI-def /
// else branches in intraBlockSplit are on missing lines.
248 class TrivialSpiller : public SpillerBase {
251 TrivialSpiller(MachineFunction *mf, LiveIntervals *lis, LiveStacks *ls,
253 SpillerBase(mf, lis, ls, vrm) {}
// Spill by inserting a load before every use and a store after every def.
255 std::vector<LiveInterval*> spill(LiveInterval *li) {
256 return trivialSpillEverywhere(li);
// Attempt to split the given value within its block around a store/load
// pair; returns an empty vector when the value's def cannot be located.
259 std::vector<LiveInterval*> intraBlockSplit(LiveInterval *li, VNInfo *valno) {
260 std::vector<LiveInterval*> spillIntervals;
261 MachineBasicBlock::iterator storeInsertPoint;
263 if (valno->isDefAccurate()) {
264 // If we have an accurate def we can just grab an iterator to the instr
// Insertion point is the instruction after the def (assignment to
// storeInsertPoint is on a missing line).
267 next(MachineBasicBlock::iterator(lis->getInstructionFromIndex(valno->def)));
269 // If the def info isn't accurate we check if this is a PHI def.
270 // If it is then def holds the index of the defining Basic Block, and we
271 // can use that to get an insertion point.
272 if (valno->isPHIDef()) {
275 // We have no usable def info. We can't split this value sensibly.
276 // FIXME: Need sensible feedback for "failure to split", an empty
277 // set of spill intervals could be reasonably returned from a
278 // split where both the store and load are folded.
279 return spillIntervals;
285 return spillIntervals;
// Factory for the default spiller implementation. The caller takes
// ownership of the heap-allocated TrivialSpiller returned here.
// NOTE(review): the function's closing brace falls past the end of this
// listing.
292 llvm::Spiller* llvm::createSpiller(MachineFunction *mf, LiveIntervals *lis,
293 LiveStacks *ls, VirtRegMap *vrm) {
294 return new TrivialSpiller(mf, lis, ls, vrm);