//===-- WebAssemblyRegStackify.cpp - Register Stackification --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// \brief This file implements a register stacking pass.
///
/// This pass reorders instructions to put register uses and defs in an order
/// such that they form single-use expression trees. Registers fitting this form
/// are then marked as "stackified", meaning references to them are replaced by
/// "push" and "pop" from the value stack.
///
/// This is primarily a code size optimization, since temporary values on the
/// value stack don't need to be named.
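///
/// For example (an illustrative sketch, not actual compiler output), rather
/// than materializing an intermediate result in a named local:
///
///    i32.add $1=, $0, $0
///    i32.add $2=, $1, $0
///
/// the stackified form passes it implicitly on the value stack:
///
///    i32.add $push0=, $0, $0
///    i32.add $push1=, $pop0, $0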
//===----------------------------------------------------------------------===//
#include "WebAssembly.h"
#include "MCTargetDesc/WebAssemblyMCTargetDesc.h" // for WebAssembly::ARGUMENT_*
#include "WebAssemblyMachineFunctionInfo.h"
#include "WebAssemblySubtarget.h"
#include "WebAssemblyUtilities.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfoImpls.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

#define DEBUG_TYPE "wasm-reg-stackify"
namespace {
class WebAssemblyRegStackify final : public MachineFunctionPass {
  StringRef getPassName() const override {
    return "WebAssembly Register Stackify";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<AAResultsWrapperPass>();
    AU.addRequired<MachineDominatorTree>();
    AU.addRequired<LiveIntervals>();
    AU.addPreserved<MachineBlockFrequencyInfo>();
    AU.addPreserved<SlotIndexes>();
    AU.addPreserved<LiveIntervals>();
    AU.addPreservedID(LiveVariablesID);
    AU.addPreserved<MachineDominatorTree>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

public:
  static char ID; // Pass identification, replacement for typeid
  WebAssemblyRegStackify() : MachineFunctionPass(ID) {}
};
} // end anonymous namespace

char WebAssemblyRegStackify::ID = 0;
FunctionPass *llvm::createWebAssemblyRegStackify() {
  return new WebAssemblyRegStackify();
}
// Decorate the given instruction with implicit operands that enforce the
// expression stack ordering constraints for an instruction which is on
// the expression stack.
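//
// For example (an illustrative sketch of the effect): after this runs, a
// stackified instruction carries "implicit-def VALUE_STACK, implicit
// VALUE_STACK" operands, which act as artificial dependencies that keep
// later passes from reordering stackified instructions relative to each
// other.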
static void ImposeStackOrdering(MachineInstr *MI) {
  // Write the opaque VALUE_STACK register.
  if (!MI->definesRegister(WebAssembly::VALUE_STACK))
    MI->addOperand(MachineOperand::CreateReg(WebAssembly::VALUE_STACK,
                                             /*isDef=*/true,
                                             /*isImp=*/true));

  // Also read the opaque VALUE_STACK register.
  if (!MI->readsRegister(WebAssembly::VALUE_STACK))
    MI->addOperand(MachineOperand::CreateReg(WebAssembly::VALUE_STACK,
                                             /*isDef=*/false,
                                             /*isImp=*/true));
}
// Convert an IMPLICIT_DEF instruction into an instruction which defines
// a constant zero value.
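//
// For example (illustrative): "%0 = IMPLICIT_DEF" becomes "%0 = CONST_I32 0",
// so the def pushes a real value and the push/pop correspondence is
// maintained.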
static void ConvertImplicitDefToConstZero(MachineInstr *MI,
                                          MachineRegisterInfo &MRI,
                                          const TargetInstrInfo *TII,
                                          MachineFunction &MF) {
  assert(MI->getOpcode() == TargetOpcode::IMPLICIT_DEF);

  const auto *RegClass = MRI.getRegClass(MI->getOperand(0).getReg());
  if (RegClass == &WebAssembly::I32RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_I32));
    MI->addOperand(MachineOperand::CreateImm(0));
  } else if (RegClass == &WebAssembly::I64RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_I64));
    MI->addOperand(MachineOperand::CreateImm(0));
  } else if (RegClass == &WebAssembly::F32RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_F32));
    ConstantFP *Val = cast<ConstantFP>(Constant::getNullValue(
        Type::getFloatTy(MF.getFunction()->getContext())));
    MI->addOperand(MachineOperand::CreateFPImm(Val));
  } else if (RegClass == &WebAssembly::F64RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_F64));
    ConstantFP *Val = cast<ConstantFP>(Constant::getNullValue(
        Type::getDoubleTy(MF.getFunction()->getContext())));
    MI->addOperand(MachineOperand::CreateFPImm(Val));
  } else {
    llvm_unreachable("Unexpected reg class");
  }
}
// Determine whether a call to the callee referenced by
// MI->getOperand(CalleeOpNo) reads memory, writes memory, and/or has side
// effects.
static void QueryCallee(const MachineInstr &MI, unsigned CalleeOpNo, bool &Read,
                        bool &Write, bool &Effects, bool &StackPointer) {
  // All calls can use the stack pointer.
  StackPointer = true;

  const MachineOperand &MO = MI.getOperand(CalleeOpNo);
  if (MO.isGlobal()) {
    const Constant *GV = MO.getGlobal();
    if (const GlobalAlias *GA = dyn_cast<GlobalAlias>(GV))
      if (!GA->isInterposable())
        GV = GA->getAliasee();

    if (const Function *F = dyn_cast<Function>(GV)) {
      if (!F->doesNotThrow())
        Effects = true;
      if (F->doesNotAccessMemory())
        return;
      if (F->onlyReadsMemory()) {
        Read = true;
        return;
      }
    }
  }

  // Otherwise, be conservative and assume the callee can do anything.
  Read = true;
  Write = true;
  Effects = true;
}
// Determine whether MI reads memory, writes memory, has side effects,
// and/or uses the stack pointer value.
static void Query(const MachineInstr &MI, AliasAnalysis &AA, bool &Read,
                  bool &Write, bool &Effects, bool &StackPointer) {
  assert(!MI.isPosition());
  assert(!MI.isTerminator());

  if (MI.isDebugValue())
    return;

  // Check for loads.
  if (MI.mayLoad() && !MI.isDereferenceableInvariantLoad(&AA))
    Read = true;

  // Check for stores.
  if (MI.mayStore()) {
    Write = true;

    const MachineFunction &MF = *MI.getParent()->getParent();
    if (MF.getSubtarget<WebAssemblySubtarget>()
            .getTargetTriple().isOSBinFormatELF()) {
      // Check for stores to __stack_pointer.
      for (auto MMO : MI.memoperands()) {
        const MachinePointerInfo &MPI = MMO->getPointerInfo();
        if (MPI.V.is<const PseudoSourceValue *>()) {
          auto PSV = MPI.V.get<const PseudoSourceValue *>();
          if (const ExternalSymbolPseudoSourceValue *EPSV =
                  dyn_cast<ExternalSymbolPseudoSourceValue>(PSV))
            if (StringRef(EPSV->getSymbol()) == "__stack_pointer")
              StackPointer = true;
        }
      }
    } else {
      // Check for sets of the stack pointer.
      const MachineModuleInfoWasm &MMIW =
          MF.getMMI().getObjFileInfo<MachineModuleInfoWasm>();
      if ((MI.getOpcode() == WebAssembly::SET_LOCAL_I32 ||
           MI.getOpcode() == WebAssembly::SET_LOCAL_I64) &&
          MI.getOperand(0).getImm() == MMIW.getStackPointerGlobal()) {
        StackPointer = true;
      }
    }
  } else if (MI.hasOrderedMemoryRef()) {
    switch (MI.getOpcode()) {
    case WebAssembly::DIV_S_I32: case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32: case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32: case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32: case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32: case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64: case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32: case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64: case WebAssembly::I64_TRUNC_U_F64:
      // These instructions have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid so they can't be arbitrarily
      // moved, however hasOrderedMemoryRef() interprets this plus their lack
      // of memoperands as having a potential unknown memory reference.
      break;
    default:
      // Record volatile accesses, unless it's a call, as calls are handled
      // specially below.
      if (!MI.isCall()) {
        Write = true;
        Effects = true;
      }
      break;
    }
  }

  // Check for side effects.
  if (MI.hasUnmodeledSideEffects()) {
    switch (MI.getOpcode()) {
    case WebAssembly::DIV_S_I32: case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32: case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32: case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32: case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32: case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64: case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32: case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64: case WebAssembly::I64_TRUNC_U_F64:
      // These instructions have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid so they can't be arbitrarily
      // moved, however in the specific case of register stackifying, it is safe
      // to move them because overflow and invalid are Undefined Behavior.
      break;
    default:
      Effects = true;
      break;
    }
  }

  // Analyze calls.
  if (MI.isCall()) {
    switch (MI.getOpcode()) {
    case WebAssembly::CALL_VOID:
    case WebAssembly::CALL_INDIRECT_VOID:
      QueryCallee(MI, 0, Read, Write, Effects, StackPointer);
      break;
    case WebAssembly::CALL_I32: case WebAssembly::CALL_I64:
    case WebAssembly::CALL_F32: case WebAssembly::CALL_F64:
    case WebAssembly::CALL_INDIRECT_I32: case WebAssembly::CALL_INDIRECT_I64:
    case WebAssembly::CALL_INDIRECT_F32: case WebAssembly::CALL_INDIRECT_F64:
      QueryCallee(MI, 1, Read, Write, Effects, StackPointer);
      break;
    default:
      llvm_unreachable("unexpected call opcode");
    }
  }
}
// Test whether Def is safe and profitable to rematerialize.
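//
// For example (illustrative), a CONST_I32 def with several distant uses is
// typically cheaper to clone at each use than to tee it or keep it in a
// named local.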
static bool ShouldRematerialize(const MachineInstr &Def, AliasAnalysis &AA,
                                const WebAssemblyInstrInfo *TII) {
  return Def.isAsCheapAsAMove() && TII->isTriviallyReMaterializable(Def, &AA);
}
// Identify the definition for this register at this point. This is a
// generalization of MachineRegisterInfo::getUniqueVRegDef that uses
// LiveIntervals to handle complex cases.
static MachineInstr *GetVRegDef(unsigned Reg, const MachineInstr *Insert,
                                const MachineRegisterInfo &MRI,
                                const LiveIntervals &LIS) {
  // Most registers are in SSA form here so we try a quick MRI query first.
  if (MachineInstr *Def = MRI.getUniqueVRegDef(Reg))
    return Def;

  // MRI doesn't know what the Def is. Try asking LIS.
  if (const VNInfo *ValNo = LIS.getInterval(Reg).getVNInfoBefore(
          LIS.getInstructionIndex(*Insert)))
    return LIS.getInstructionFromIndex(ValNo->def);

  return nullptr;
}
// Test whether Reg, as defined at Def, has exactly one use. This is a
// generalization of MachineRegisterInfo::hasOneUse that uses LiveIntervals
// to handle complex cases.
static bool HasOneUse(unsigned Reg, MachineInstr *Def,
                      MachineRegisterInfo &MRI, MachineDominatorTree &MDT,
                      LiveIntervals &LIS) {
  // Most registers are in SSA form here so we try a quick MRI query first.
  if (MRI.hasOneUse(Reg))
    return true;

  bool HasOne = false;
  const LiveInterval &LI = LIS.getInterval(Reg);
  const VNInfo *DefVNI = LI.getVNInfoAt(
      LIS.getInstructionIndex(*Def).getRegSlot());
  assert(DefVNI);
  for (auto &I : MRI.use_nodbg_operands(Reg)) {
    const auto &Result = LI.Query(LIS.getInstructionIndex(*I.getParent()));
    if (Result.valueIn() == DefVNI) {
      if (!Result.isKill())
        return false;
      if (HasOne)
        return false;
      HasOne = true;
    }
  }
  return HasOne;
}
// Test whether it's safe to move Def to just before Insert.
// TODO: Compute memory dependencies in a way that doesn't require always
// walking the block.
// TODO: Compute memory dependencies in a way that uses AliasAnalysis to be
// more precise.
static bool IsSafeToMove(const MachineInstr *Def, const MachineInstr *Insert,
                         AliasAnalysis &AA, const MachineRegisterInfo &MRI) {
  assert(Def->getParent() == Insert->getParent());

  // Check for register dependencies.
  SmallVector<unsigned, 4> MutableRegisters;
  for (const MachineOperand &MO : Def->operands()) {
    if (!MO.isReg() || MO.isUndef())
      continue;
    unsigned Reg = MO.getReg();

    // If the register is dead here and at Insert, ignore it.
    if (MO.isDead() && Insert->definesRegister(Reg) &&
        !Insert->readsRegister(Reg))
      continue;

    if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
      // Ignore ARGUMENTS; it's just used to keep the ARGUMENT_* instructions
      // from moving down, and we've already checked for that.
      if (Reg == WebAssembly::ARGUMENTS)
        continue;
      // If the physical register is never modified, ignore it.
      if (!MRI.isPhysRegModified(Reg))
        continue;
      // Otherwise, it's a physical register with unknown liveness.
      return false;
    }

    // If one of the operands isn't in SSA form, it has different values at
    // different times, and we need to make sure we don't move our use across
    // a different def.
    if (!MO.isDef() && !MRI.hasOneDef(Reg))
      MutableRegisters.push_back(Reg);
  }

  bool Read = false, Write = false, Effects = false, StackPointer = false;
  Query(*Def, AA, Read, Write, Effects, StackPointer);

  // If the instruction does not access memory and has no side effects, it has
  // no additional dependencies.
  bool HasMutableRegisters = !MutableRegisters.empty();
  if (!Read && !Write && !Effects && !StackPointer && !HasMutableRegisters)
    return true;

  // Scan through the intervening instructions between Def and Insert.
  MachineBasicBlock::const_iterator D(Def), I(Insert);
  for (--I; I != D; --I) {
    bool InterveningRead = false;
    bool InterveningWrite = false;
    bool InterveningEffects = false;
    bool InterveningStackPointer = false;
    Query(*I, AA, InterveningRead, InterveningWrite, InterveningEffects,
          InterveningStackPointer);
    if (Effects && InterveningEffects)
      return false;
    if (Read && InterveningWrite)
      return false;
    if (Write && (InterveningRead || InterveningWrite))
      return false;
    if (StackPointer && InterveningStackPointer)
      return false;

    for (unsigned Reg : MutableRegisters)
      for (const MachineOperand &MO : I->operands())
        if (MO.isReg() && MO.isDef() && MO.getReg() == Reg)
          return false;
  }

  return true;
}
/// Test whether OneUse, a use of Reg, dominates all of Reg's other uses.
static bool OneUseDominatesOtherUses(unsigned Reg, const MachineOperand &OneUse,
                                     const MachineBasicBlock &MBB,
                                     const MachineRegisterInfo &MRI,
                                     const MachineDominatorTree &MDT,
                                     LiveIntervals &LIS,
                                     WebAssemblyFunctionInfo &MFI) {
  const LiveInterval &LI = LIS.getInterval(Reg);

  const MachineInstr *OneUseInst = OneUse.getParent();
  VNInfo *OneUseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*OneUseInst));

  for (const MachineOperand &Use : MRI.use_nodbg_operands(Reg)) {
    if (&Use == &OneUse)
      continue;

    const MachineInstr *UseInst = Use.getParent();
    VNInfo *UseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*UseInst));

    // Don't worry about uses of other values of this register.
    if (UseVNI != OneUseVNI)
      continue;

    if (UseInst == OneUseInst) {
      // Another use in the same instruction. We need to ensure that the one
      // selected use happens "before" it.
      if (&OneUse > &Use)
        return false;
    } else {
      // Test that the use is dominated by the one selected use.
      while (!MDT.dominates(OneUseInst, UseInst)) {
        // Actually, dominating is over-conservative. Test that the use would
        // happen after the one selected use in the stack evaluation order.
        //
        // This is needed as a consequence of using implicit get_locals for
        // uses and implicit set_locals for defs.
        if (UseInst->getDesc().getNumDefs() == 0)
          return false;
        const MachineOperand &MO = UseInst->getOperand(0);
        if (!MO.isReg())
          return false;
        unsigned DefReg = MO.getReg();
        if (!TargetRegisterInfo::isVirtualRegister(DefReg) ||
            !MFI.isVRegStackified(DefReg))
          return false;
        assert(MRI.hasOneUse(DefReg));
        const MachineOperand &NewUse = *MRI.use_begin(DefReg);
        const MachineInstr *NewUseInst = NewUse.getParent();
        if (NewUseInst == OneUseInst) {
          if (&OneUse > &NewUse)
            return false;
          break;
        }
        UseInst = NewUseInst;
      }
    }
  }
  return true;
}
/// Get the appropriate tee opcode for the given register class.
static unsigned GetTeeOpcode(const TargetRegisterClass *RC) {
  if (RC == &WebAssembly::I32RegClass)
    return WebAssembly::TEE_I32;
  if (RC == &WebAssembly::I64RegClass)
    return WebAssembly::TEE_I64;
  if (RC == &WebAssembly::F32RegClass)
    return WebAssembly::TEE_F32;
  if (RC == &WebAssembly::F64RegClass)
    return WebAssembly::TEE_F64;
  if (RC == &WebAssembly::V128RegClass)
    return WebAssembly::TEE_V128;
  llvm_unreachable("Unexpected register class");
}
// Shrink LI to its uses, cleaning up LI.
static void ShrinkToUses(LiveInterval &LI, LiveIntervals &LIS) {
  if (LIS.shrinkToUses(&LI)) {
    SmallVector<LiveInterval *, 4> SplitLIs;
    LIS.splitSeparateComponents(LI, SplitLIs);
  }
}
/// A single-use def in the same block with no intervening memory or register
/// dependencies; move the def down and nest it with the current instruction.
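///
/// As an illustration (a sketch in the style of the MoveAndTeeForMultiUse
/// example below), rewrite this:
///
///    Reg = INST ...        // Def
///    ...
///    INST ..., Reg, ...    // Insert
///
/// to this:
///
///    ...
///    Reg = INST ...        // Def, moved down and stackified
///    INST ..., Reg, ...    // Insert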
static MachineInstr *MoveForSingleUse(unsigned Reg, MachineOperand &Op,
                                      MachineInstr *Def,
                                      MachineBasicBlock &MBB,
                                      MachineInstr *Insert, LiveIntervals &LIS,
                                      WebAssemblyFunctionInfo &MFI,
                                      MachineRegisterInfo &MRI) {
  DEBUG(dbgs() << "Move for single use: "; Def->dump());

  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);

  if (MRI.hasOneDef(Reg) && MRI.hasOneUse(Reg)) {
    // No one else is using this register for anything so we can just stackify
    // it in place.
    MFI.stackifyVReg(Reg);
  } else {
    // The register may have unrelated uses or defs; create a new register for
    // just our one def and use so that we can stackify it.
    unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
    Def->getOperand(0).setReg(NewReg);
    Op.setReg(NewReg);

    // Tell LiveIntervals about the new register.
    LIS.createAndComputeVirtRegInterval(NewReg);

    // Tell LiveIntervals about the changes to the old register.
    LiveInterval &LI = LIS.getInterval(Reg);
    LI.removeSegment(LIS.getInstructionIndex(*Def).getRegSlot(),
                     LIS.getInstructionIndex(*Op.getParent()).getRegSlot(),
                     /*RemoveDeadValNo=*/true);

    MFI.stackifyVReg(NewReg);

    DEBUG(dbgs() << " - Replaced register: "; Def->dump());
  }

  ImposeStackOrdering(Def);
  return Def;
}
/// A trivially cloneable instruction; clone it and nest the new copy with the
/// current instruction.
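///
/// For example (illustrative), given a cheap def with a distant use:
///
///    Reg = CONST_I32 0     // Def
///    ...
///    INST ..., Reg, ...    // Insert
///
/// clone the def next to its use instead of moving it:
///
///    Reg = CONST_I32 0     // Def (deleted below if this was its last use)
///    ...
///    NewReg = CONST_I32 0  // Clone, stackified
///    INST ..., NewReg, ... // Insert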
static MachineInstr *RematerializeCheapDef(
    unsigned Reg, MachineOperand &Op, MachineInstr &Def, MachineBasicBlock &MBB,
    MachineBasicBlock::instr_iterator Insert, LiveIntervals &LIS,
    WebAssemblyFunctionInfo &MFI, MachineRegisterInfo &MRI,
    const WebAssemblyInstrInfo *TII, const WebAssemblyRegisterInfo *TRI) {
  DEBUG(dbgs() << "Rematerializing cheap def: "; Def.dump());
  DEBUG(dbgs() << " - for use in "; Op.getParent()->dump());

  unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
  TII->reMaterialize(MBB, Insert, NewReg, 0, Def, *TRI);
  Op.setReg(NewReg);
  MachineInstr *Clone = &*std::prev(Insert);
  LIS.InsertMachineInstrInMaps(*Clone);
  LIS.createAndComputeVirtRegInterval(NewReg);
  MFI.stackifyVReg(NewReg);
  ImposeStackOrdering(Clone);

  DEBUG(dbgs() << " - Cloned to "; Clone->dump());

  // Shrink the interval.
  bool IsDead = MRI.use_empty(Reg);
  if (!IsDead) {
    LiveInterval &LI = LIS.getInterval(Reg);
    ShrinkToUses(LI, LIS);
    IsDead = !LI.liveAt(LIS.getInstructionIndex(Def).getDeadSlot());
  }

  // If that was the last use of the original, delete the original.
  if (IsDead) {
    DEBUG(dbgs() << " - Deleting original\n");
    SlotIndex Idx = LIS.getInstructionIndex(Def).getRegSlot();
    LIS.removePhysRegDefAt(WebAssembly::ARGUMENTS, Idx);
    LIS.removeInterval(Reg);
    LIS.RemoveMachineInstrFromMaps(Def);
    Def.eraseFromParent();
  }

  return Clone;
}
/// A multiple-use def in the same block with no intervening memory or register
/// dependencies; move the def down, nest it with the current instruction, and
/// insert a tee to satisfy the rest of the uses. As an illustration, rewrite
/// this:
///
///    Reg = INST ...        // Def
///    INST ..., Reg, ...    // Insert
///    INST ..., Reg, ...
///    INST ..., Reg, ...
///
/// to this:
///
///    DefReg = INST ...     // Def (to become the new Insert)
///    TeeReg, Reg = TEE_... DefReg
///    INST ..., TeeReg, ... // Insert
///    INST ..., Reg, ...
///    INST ..., Reg, ...
///
/// with DefReg and TeeReg stackified. This eliminates a get_local from the
/// resulting code.
static MachineInstr *MoveAndTeeForMultiUse(
    unsigned Reg, MachineOperand &Op, MachineInstr *Def, MachineBasicBlock &MBB,
    MachineInstr *Insert, LiveIntervals &LIS, WebAssemblyFunctionInfo &MFI,
    MachineRegisterInfo &MRI, const WebAssemblyInstrInfo *TII) {
  DEBUG(dbgs() << "Move and tee for multi-use:"; Def->dump());

  // Move Def into place.
  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);

  // Create the Tee and attach the registers.
  const auto *RegClass = MRI.getRegClass(Reg);
  unsigned TeeReg = MRI.createVirtualRegister(RegClass);
  unsigned DefReg = MRI.createVirtualRegister(RegClass);
  MachineOperand &DefMO = Def->getOperand(0);
  MachineInstr *Tee = BuildMI(MBB, Insert, Insert->getDebugLoc(),
                              TII->get(GetTeeOpcode(RegClass)), TeeReg)
                          .addReg(Reg, RegState::Define)
                          .addReg(DefReg, getUndefRegState(DefMO.isDead()));
  Op.setReg(TeeReg);
  DefMO.setReg(DefReg);
  SlotIndex TeeIdx = LIS.InsertMachineInstrInMaps(*Tee).getRegSlot();
  SlotIndex DefIdx = LIS.getInstructionIndex(*Def).getRegSlot();

  // Tell LiveIntervals we moved the original vreg def from Def to Tee.
  LiveInterval &LI = LIS.getInterval(Reg);
  LiveInterval::iterator I = LI.FindSegmentContaining(DefIdx);
  VNInfo *ValNo = LI.getVNInfoAt(DefIdx);
  I->start = TeeIdx;
  ValNo->def = TeeIdx;
  ShrinkToUses(LI, LIS);

  // Finish stackifying the new regs.
  LIS.createAndComputeVirtRegInterval(TeeReg);
  LIS.createAndComputeVirtRegInterval(DefReg);
  MFI.stackifyVReg(DefReg);
  MFI.stackifyVReg(TeeReg);
  ImposeStackOrdering(Def);
  ImposeStackOrdering(Tee);

  DEBUG(dbgs() << " - Replaced register: "; Def->dump());
  DEBUG(dbgs() << " - Tee instruction: "; Tee->dump());
  return Def;
}
namespace {
/// A stack for walking the tree of instructions being built, visiting the
/// MachineOperands in DFS order.
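///
/// For example (illustrative): when visiting "INST ..., %a, %b", Pop() yields
/// %b first; if %b's def is then stackified, that def's operands are pushed
/// and visited before %a, giving a last-operand-first depth-first traversal
/// of the growing expression tree.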
class TreeWalkerState {
  typedef MachineInstr::mop_iterator mop_iterator;
  typedef std::reverse_iterator<mop_iterator> mop_reverse_iterator;
  typedef iterator_range<mop_reverse_iterator> RangeTy;
  SmallVector<RangeTy, 4> Worklist;

public:
  explicit TreeWalkerState(MachineInstr *Insert) {
    const iterator_range<mop_iterator> &Range = Insert->explicit_uses();
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  bool Done() const { return Worklist.empty(); }

  MachineOperand &Pop() {
    RangeTy &Range = Worklist.back();
    MachineOperand &Op = *Range.begin();
    Range = drop_begin(Range, 1);
    if (Range.begin() == Range.end())
      Worklist.pop_back();
    assert((Worklist.empty() ||
            Worklist.back().begin() != Worklist.back().end()) &&
           "Empty ranges shouldn't remain in the worklist");
    return Op;
  }

  /// Push Instr's operands onto the stack to be visited.
  void PushOperands(MachineInstr *Instr) {
    const iterator_range<mop_iterator> &Range(Instr->explicit_uses());
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  /// Some of Instr's operands are on the top of the stack; remove them and
  /// re-insert them starting from the beginning (because we've commuted them).
  void ResetTopOperands(MachineInstr *Instr) {
    assert(HasRemainingOperands(Instr) &&
           "Resetting operands should only be done when the instruction has "
           "an operand still on the stack");
    Worklist.back() = reverse(Instr->explicit_uses());
  }

  /// Test whether Instr has operands remaining to be visited at the top of
  /// the stack.
  bool HasRemainingOperands(const MachineInstr *Instr) const {
    if (Worklist.empty())
      return false;
    const RangeTy &Range = Worklist.back();
    return Range.begin() != Range.end() && Range.begin()->getParent() == Instr;
  }

  /// Test whether the given register is present on the stack, indicating an
  /// operand in the tree that we haven't visited yet. Moving a definition of
  /// Reg to a point in the tree after that would change its value.
  ///
  /// This is needed as a consequence of using implicit get_locals for
  /// uses and implicit set_locals for defs.
  bool IsOnStack(unsigned Reg) const {
    for (const RangeTy &Range : Worklist)
      for (const MachineOperand &MO : Range)
        if (MO.isReg() && MO.getReg() == Reg)
          return true;
    return false;
  }
};
/// State to keep track of whether commuting is in flight or whether it's been
/// tried for the current instruction and didn't work.
class CommutingState {
  /// There are effectively three states: the initial state where we haven't
  /// started commuting anything and we don't know anything yet, the tentative
  /// state where we've commuted the operands of the current instruction and
  /// are revisiting it, and the declined state where we've reverted the
  /// operands back to their original order and will no longer commute it
  /// further.
  bool TentativelyCommuting;
  bool Declined;

  /// During the tentative state, these hold the operand indices of the
  /// commuted operands.
  unsigned Operand0, Operand1;

public:
  CommutingState() : TentativelyCommuting(false), Declined(false) {}

  /// Stackification for an operand was not successful due to ordering
  /// constraints. If possible, and if we haven't already tried it and declined
  /// it, commute Insert's operands and prepare to revisit it.
  void MaybeCommute(MachineInstr *Insert, TreeWalkerState &TreeWalker,
                    const WebAssemblyInstrInfo *TII) {
    if (TentativelyCommuting) {
      assert(!Declined &&
             "Don't decline commuting until you've finished trying it");
      // Commuting didn't help. Revert it.
      TII->commuteInstruction(*Insert, /*NewMI=*/false, Operand0, Operand1);
      TentativelyCommuting = false;
      Declined = true;
    } else if (!Declined && TreeWalker.HasRemainingOperands(Insert)) {
      Operand0 = TargetInstrInfo::CommuteAnyOperandIndex;
      Operand1 = TargetInstrInfo::CommuteAnyOperandIndex;
      if (TII->findCommutedOpIndices(*Insert, Operand0, Operand1)) {
        // Tentatively commute the operands and try again.
        TII->commuteInstruction(*Insert, /*NewMI=*/false, Operand0, Operand1);
        TreeWalker.ResetTopOperands(Insert);
        TentativelyCommuting = true;
      }
    }
  }

  /// Stackification for some operand was successful. Reset to the default
  /// state.
  void Reset() {
    TentativelyCommuting = false;
    Declined = false;
  }
};
} // end anonymous namespace
bool WebAssemblyRegStackify::runOnMachineFunction(MachineFunction &MF) {
  DEBUG(dbgs() << "********** Register Stackifying **********\n"
                  "********** Function: "
               << MF.getName() << '\n');

  bool Changed = false;
  MachineRegisterInfo &MRI = MF.getRegInfo();
  WebAssemblyFunctionInfo &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();
  const auto *TII = MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  const auto *TRI = MF.getSubtarget<WebAssemblySubtarget>().getRegisterInfo();
  AliasAnalysis &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
  MachineDominatorTree &MDT = getAnalysis<MachineDominatorTree>();
  LiveIntervals &LIS = getAnalysis<LiveIntervals>();

  // Walk the instructions from the bottom up. Currently we don't look past
  // block boundaries, and the blocks aren't ordered so the block visitation
  // order isn't significant, but we may want to change this in the future.
  for (MachineBasicBlock &MBB : MF) {
    // Don't use a range-based for loop, because we modify the list as we're
    // iterating over it and the end iterator may change.
    for (auto MII = MBB.rbegin(); MII != MBB.rend(); ++MII) {
      MachineInstr *Insert = &*MII;
      // Don't nest anything inside an inline asm, because we don't have
      // constraints for $push inputs.
      if (Insert->getOpcode() == TargetOpcode::INLINEASM)
        continue;

      // Ignore debugging intrinsics.
      if (Insert->getOpcode() == TargetOpcode::DBG_VALUE)
        continue;

      // Iterate through the inputs in reverse order, since we'll be pulling
      // operands off the stack in LIFO order.
      CommutingState Commuting;
      TreeWalkerState TreeWalker(Insert);
      while (!TreeWalker.Done()) {
        MachineOperand &Op = TreeWalker.Pop();

        // We're only interested in explicit virtual register operands.
        if (!Op.isReg())
          continue;

        unsigned Reg = Op.getReg();
        assert(Op.isUse() && "explicit_uses() should only iterate over uses");
        assert(!Op.isImplicit() &&
               "explicit_uses() should only iterate over explicit operands");
        if (TargetRegisterInfo::isPhysicalRegister(Reg))
          continue;

        // Identify the definition for this register at this point.
        MachineInstr *Def = GetVRegDef(Reg, Insert, MRI, LIS);
        if (!Def)
          continue;

        // Don't nest an INLINE_ASM def into anything, because we don't have
        // constraints for $pop outputs.
        if (Def->getOpcode() == TargetOpcode::INLINEASM)
          continue;

        // Argument instructions represent live-in registers and not real
        // instructions.
        if (WebAssembly::isArgument(*Def))
          continue;

        // Decide which strategy to take. Prefer to move a single-use value
        // over cloning it, and prefer cloning over introducing a tee.
        // For moving, we require the def to be in the same block as the use;
        // this makes things simpler (LiveIntervals' handleMove function only
        // supports intra-block moves) and it's MachineSink's job to catch all
        // the sinking opportunities anyway.
        bool SameBlock = Def->getParent() == &MBB;
        bool CanMove = SameBlock && IsSafeToMove(Def, Insert, AA, MRI) &&
                       !TreeWalker.IsOnStack(Reg);
        if (CanMove && HasOneUse(Reg, Def, MRI, MDT, LIS)) {
          Insert = MoveForSingleUse(Reg, Op, Def, MBB, Insert, LIS, MFI, MRI);
        } else if (ShouldRematerialize(*Def, AA, TII)) {
          Insert =
              RematerializeCheapDef(Reg, Op, *Def, MBB, Insert->getIterator(),
                                    LIS, MFI, MRI, TII, TRI);
        } else if (CanMove &&
                   OneUseDominatesOtherUses(Reg, Op, MBB, MRI, MDT, LIS, MFI)) {
          Insert = MoveAndTeeForMultiUse(Reg, Op, Def, MBB, Insert, LIS, MFI,
                                         MRI, TII);
        } else {
          // We failed to stackify the operand. If the problem was ordering
          // constraints, Commuting may be able to help.
          if (!CanMove && SameBlock)
            Commuting.MaybeCommute(Insert, TreeWalker, TII);
          // Proceed to the next operand.
          continue;
        }

        // If the instruction we just stackified is an IMPLICIT_DEF, convert it
        // to a constant 0 so that the def is explicit, and the push/pop
        // correspondence is maintained.
        if (Insert->getOpcode() == TargetOpcode::IMPLICIT_DEF)
          ConvertImplicitDefToConstZero(Insert, MRI, TII, MF);

        // We stackified an operand. Add the defining instruction's operands to
        // the worklist stack now to continue to build an ever deeper tree.
        Commuting.Reset();
        TreeWalker.PushOperands(Insert);
      }

      // If we stackified any operands, skip over the tree to start looking for
      // the next instruction we can build a tree on.
      if (Insert != &*MII) {
        ImposeStackOrdering(&*MII);
        MII = MachineBasicBlock::iterator(Insert).getReverse();
        Changed = true;
      }
    }
  }

  // If we used VALUE_STACK anywhere, add it to the live-in sets everywhere so
  // that it never looks like a use-before-def.
  if (Changed) {
    MF.getRegInfo().addLiveIn(WebAssembly::VALUE_STACK);
    for (MachineBasicBlock &MBB : MF)
      MBB.addLiveIn(WebAssembly::VALUE_STACK);
  }

#ifndef NDEBUG
  // Verify that pushes and pops are performed in LIFO order.
  SmallVector<unsigned, 0> Stack;
  for (MachineBasicBlock &MBB : MF) {
    for (MachineInstr &MI : MBB) {
      if (MI.isDebugValue())
        continue;
      for (MachineOperand &MO : reverse(MI.explicit_operands())) {
        if (!MO.isReg())
          continue;
        unsigned Reg = MO.getReg();

        if (MFI.isVRegStackified(Reg)) {
          if (MO.isDef())
            Stack.push_back(Reg);
          else
            assert(Stack.pop_back_val() == Reg &&
                   "Register stack pop should be paired with a push");
        }
      }
    }
  }
  // TODO: Generalize this code to support keeping values on the stack across
  // basic block boundaries.
  assert(Stack.empty() &&
         "Register stack pushes and pops should be balanced");
#endif

  return Changed;
}