//===-- WebAssemblyRegStackify.cpp - Register Stackification --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// \brief This file implements a register stacking pass.
///
/// This pass reorders instructions to put register uses and defs in an order
/// such that they form single-use expression trees. Registers fitting this form
/// are then marked as "stackified", meaning references to them are replaced by
/// "push" and "pop" from the value stack.
///
/// This is primarily a code size optimization, since temporary values on the
/// value stack don't need to be named.
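///
/// As an illustrative sketch (not verbatim output of this pass), defs like:
///
///    %1 = CONST_I32 2
///    %2 = ADD_I32 %0, %1
///
/// can, once %1 and %2 are stackified, be emitted with the temporaries left
/// implicit on the value stack, conceptually:
///
///    i32.const 2    ;; push 2
///    i32.add        ;; pop two operands, push their sum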
///
//===----------------------------------------------------------------------===//

#include "WebAssembly.h"
#include "MCTargetDesc/WebAssemblyMCTargetDesc.h" // for WebAssembly::ARGUMENT_*
#include "WebAssemblyMachineFunctionInfo.h"
#include "WebAssemblySubtarget.h"
#include "WebAssemblyUtilities.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

#define DEBUG_TYPE "wasm-reg-stackify"

namespace {
class WebAssemblyRegStackify final : public MachineFunctionPass {
  StringRef getPassName() const override {
    return "WebAssembly Register Stackify";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<AAResultsWrapperPass>();
    AU.addRequired<MachineDominatorTree>();
    AU.addRequired<LiveIntervals>();
    AU.addPreserved<MachineBlockFrequencyInfo>();
    AU.addPreserved<SlotIndexes>();
    AU.addPreserved<LiveIntervals>();
    AU.addPreservedID(LiveVariablesID);
    AU.addPreserved<MachineDominatorTree>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

public:
  static char ID; // Pass identification, replacement for typeid
  WebAssemblyRegStackify() : MachineFunctionPass(ID) {}
};
} // end anonymous namespace
char WebAssemblyRegStackify::ID = 0;
FunctionPass *llvm::createWebAssemblyRegStackify() {
  return new WebAssemblyRegStackify();
}
// Decorate the given instruction with implicit operands that enforce the
// expression stack ordering constraints for an instruction which is on
// the expression stack.
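// For example (illustrative; the printed MIR syntax is approximate), an
// instruction such as
//
//   %1 = ADD_I32 %2, %3
//
// is decorated to become
//
//   %1 = ADD_I32 %2, %3, implicit-def VALUE_STACK, implicit VALUE_STACK
//
// so that later passes can't reorder it relative to other stackified
// instructions.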
static void ImposeStackOrdering(MachineInstr *MI) {
  // Write the opaque VALUE_STACK register.
  if (!MI->definesRegister(WebAssembly::VALUE_STACK))
    MI->addOperand(MachineOperand::CreateReg(WebAssembly::VALUE_STACK,
                                             /*isDef=*/true,
                                             /*isImp=*/true));

  // Also read the opaque VALUE_STACK register.
  if (!MI->readsRegister(WebAssembly::VALUE_STACK))
    MI->addOperand(MachineOperand::CreateReg(WebAssembly::VALUE_STACK,
                                             /*isDef=*/false,
                                             /*isImp=*/true));
}
// Convert an IMPLICIT_DEF instruction into an instruction which defines
// a constant zero value.
static void ConvertImplicitDefToConstZero(MachineInstr *MI,
                                          MachineRegisterInfo &MRI,
                                          const TargetInstrInfo *TII,
                                          MachineFunction &MF) {
  assert(MI->getOpcode() == TargetOpcode::IMPLICIT_DEF);

  const auto *RegClass = MRI.getRegClass(MI->getOperand(0).getReg());
  if (RegClass == &WebAssembly::I32RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_I32));
    MI->addOperand(MachineOperand::CreateImm(0));
  } else if (RegClass == &WebAssembly::I64RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_I64));
    MI->addOperand(MachineOperand::CreateImm(0));
  } else if (RegClass == &WebAssembly::F32RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_F32));
    ConstantFP *Val = cast<ConstantFP>(Constant::getNullValue(
        Type::getFloatTy(MF.getFunction()->getContext())));
    MI->addOperand(MachineOperand::CreateFPImm(Val));
  } else if (RegClass == &WebAssembly::F64RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_F64));
    ConstantFP *Val = cast<ConstantFP>(Constant::getNullValue(
        Type::getDoubleTy(MF.getFunction()->getContext())));
    MI->addOperand(MachineOperand::CreateFPImm(Val));
  } else {
    llvm_unreachable("Unexpected reg class");
  }
}
// Determine whether a call to the callee referenced by
// MI->getOperand(CalleeOpNo) reads memory, writes memory, and/or has side
// effects.
static void QueryCallee(const MachineInstr &MI, unsigned CalleeOpNo, bool &Read,
                        bool &Write, bool &Effects, bool &StackPointer) {
  // All calls can use the stack pointer.
  StackPointer = true;

  const MachineOperand &MO = MI.getOperand(CalleeOpNo);
  if (MO.isGlobal()) {
    const Constant *GV = MO.getGlobal();
    if (const GlobalAlias *GA = dyn_cast<GlobalAlias>(GV))
      if (!GA->isInterposable())
        GV = GA->getAliasee();

    if (const Function *F = dyn_cast<Function>(GV)) {
      if (!F->doesNotThrow())
        Effects = true;
      if (F->doesNotAccessMemory())
        return;
      if (F->onlyReadsMemory()) {
        Read = true;
        return;
      }
    }
  }

  // Assume the worst.
  Write = true;
  Read = true;
  Effects = true;
}
// Determine whether MI reads memory, writes memory, has side effects,
// and/or uses the __stack_pointer value.
static void Query(const MachineInstr &MI, AliasAnalysis &AA, bool &Read,
                  bool &Write, bool &Effects, bool &StackPointer) {
  assert(!MI.isPosition());
  assert(!MI.isTerminator());

  if (MI.isDebugValue())
    return;

  // Check for loads.
  if (MI.mayLoad() && !MI.isDereferenceableInvariantLoad(&AA))
    Read = true;

  // Check for stores.
  if (MI.mayStore()) {
    Write = true;

    // Check for stores to __stack_pointer.
    for (auto MMO : MI.memoperands()) {
      const MachinePointerInfo &MPI = MMO->getPointerInfo();
      if (MPI.V.is<const PseudoSourceValue *>()) {
        auto PSV = MPI.V.get<const PseudoSourceValue *>();
        if (const ExternalSymbolPseudoSourceValue *EPSV =
                dyn_cast<ExternalSymbolPseudoSourceValue>(PSV))
          if (StringRef(EPSV->getSymbol()) == "__stack_pointer")
            StackPointer = true;
      }
    }
  } else if (MI.hasOrderedMemoryRef()) {
    switch (MI.getOpcode()) {
    case WebAssembly::DIV_S_I32: case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32: case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32: case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32: case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32: case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64: case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32: case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64: case WebAssembly::I64_TRUNC_U_F64:
      // These instructions have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid so they can't be arbitrarily
      // moved, however hasOrderedMemoryRef() interprets this plus their lack
      // of memoperands as having a potential unknown memory reference.
      break;
    default:
      // Record volatile accesses, unless it's a call, as calls are handled
      // specially below.
      if (!MI.isCall()) {
        Write = true;
        Effects = true;
      }
      break;
    }
  }

  // Check for side effects.
  if (MI.hasUnmodeledSideEffects()) {
    switch (MI.getOpcode()) {
    case WebAssembly::DIV_S_I32: case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32: case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32: case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32: case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32: case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64: case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32: case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64: case WebAssembly::I64_TRUNC_U_F64:
      // These instructions have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid so they can't be arbitrarily
      // moved, however in the specific case of register stackifying, it is
      // safe to move them because overflow and invalid are Undefined Behavior.
      break;
    default:
      Effects = true;
      break;
    }
  }

  // Analyze calls.
  if (MI.isCall()) {
    switch (MI.getOpcode()) {
    case WebAssembly::CALL_VOID:
    case WebAssembly::CALL_INDIRECT_VOID:
      QueryCallee(MI, 0, Read, Write, Effects, StackPointer);
      break;
    case WebAssembly::CALL_I32: case WebAssembly::CALL_I64:
    case WebAssembly::CALL_F32: case WebAssembly::CALL_F64:
    case WebAssembly::CALL_INDIRECT_I32: case WebAssembly::CALL_INDIRECT_I64:
    case WebAssembly::CALL_INDIRECT_F32: case WebAssembly::CALL_INDIRECT_F64:
      QueryCallee(MI, 1, Read, Write, Effects, StackPointer);
      break;
    default:
      llvm_unreachable("unexpected call opcode");
    }
  }
}
// Test whether Def is safe and profitable to rematerialize.
static bool ShouldRematerialize(const MachineInstr &Def, AliasAnalysis &AA,
                                const WebAssemblyInstrInfo *TII) {
  return Def.isAsCheapAsAMove() && TII->isTriviallyReMaterializable(Def, &AA);
}
// Identify the definition for this register at this point. This is a
// generalization of MachineRegisterInfo::getUniqueVRegDef that uses
// LiveIntervals to handle complex cases.
static MachineInstr *GetVRegDef(unsigned Reg, const MachineInstr *Insert,
                                const MachineRegisterInfo &MRI,
                                const LiveIntervals &LIS) {
  // Most registers are in SSA form here so we try a quick MRI query first.
  if (MachineInstr *Def = MRI.getUniqueVRegDef(Reg))
    return Def;

  // MRI doesn't know what the Def is. Try asking LIS.
  if (const VNInfo *ValNo = LIS.getInterval(Reg).getVNInfoBefore(
          LIS.getInstructionIndex(*Insert)))
    return LIS.getInstructionFromIndex(ValNo->def);

  return nullptr;
}
// Test whether Reg, as defined at Def, has exactly one use. This is a
// generalization of MachineRegisterInfo::hasOneUse that uses LiveIntervals
// to handle complex cases.
static bool HasOneUse(unsigned Reg, MachineInstr *Def,
                      MachineRegisterInfo &MRI, MachineDominatorTree &MDT,
                      LiveIntervals &LIS) {
  // Most registers are in SSA form here so we try a quick MRI query first.
  if (MRI.hasOneUse(Reg))
    return true;

  bool HasOne = false;
  const LiveInterval &LI = LIS.getInterval(Reg);
  const VNInfo *DefVNI = LI.getVNInfoAt(
      LIS.getInstructionIndex(*Def).getRegSlot());
  assert(DefVNI);
  for (auto &I : MRI.use_nodbg_operands(Reg)) {
    const auto &Result = LI.Query(LIS.getInstructionIndex(*I.getParent()));
    if (Result.valueIn() == DefVNI) {
      if (!Result.isKill())
        return false;
      if (HasOne)
        return false;
      HasOne = true;
    }
  }
  return HasOne;
}
// Test whether it's safe to move Def to just before Insert.
// TODO: Compute memory dependencies in a way that doesn't require always
// walking the block.
// TODO: Compute memory dependencies in a way that uses AliasAnalysis to be
// more precise.
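// For example (illustrative): a Def that loads from memory can't be moved
// across an intervening store, and a Def that reads __stack_pointer can't be
// moved across a call, since all calls may use the stack pointer.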
static bool IsSafeToMove(const MachineInstr *Def, const MachineInstr *Insert,
                         AliasAnalysis &AA, const MachineRegisterInfo &MRI) {
  assert(Def->getParent() == Insert->getParent());

  // Check for register dependencies.
  SmallVector<unsigned, 4> MutableRegisters;
  for (const MachineOperand &MO : Def->operands()) {
    if (!MO.isReg() || MO.isUndef())
      continue;
    unsigned Reg = MO.getReg();

    // If the register is dead here and at Insert, ignore it.
    if (MO.isDead() && Insert->definesRegister(Reg) &&
        !Insert->readsRegister(Reg))
      continue;

    if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
      // Ignore ARGUMENTS; it's just used to keep the ARGUMENT_* instructions
      // from moving down, and we've already checked for that.
      if (Reg == WebAssembly::ARGUMENTS)
        continue;
      // If the physical register is never modified, ignore it.
      if (!MRI.isPhysRegModified(Reg))
        continue;
      // Otherwise, it's a physical register with unknown liveness.
      return false;
    }

    // If one of the operands isn't in SSA form, it has different values at
    // different times, and we need to make sure we don't move our use across
    // a different def.
    if (!MO.isDef() && !MRI.hasOneDef(Reg))
      MutableRegisters.push_back(Reg);
  }

  bool Read = false, Write = false, Effects = false, StackPointer = false;
  Query(*Def, AA, Read, Write, Effects, StackPointer);

  // If the instruction does not access memory and has no side effects, it has
  // no additional dependencies.
  bool HasMutableRegisters = !MutableRegisters.empty();
  if (!Read && !Write && !Effects && !StackPointer && !HasMutableRegisters)
    return true;

  // Scan through the intervening instructions between Def and Insert.
  MachineBasicBlock::const_iterator D(Def), I(Insert);
  for (--I; I != D; --I) {
    bool InterveningRead = false;
    bool InterveningWrite = false;
    bool InterveningEffects = false;
    bool InterveningStackPointer = false;
    Query(*I, AA, InterveningRead, InterveningWrite, InterveningEffects,
          InterveningStackPointer);
    if (Effects && InterveningEffects)
      return false;
    if (Read && InterveningWrite)
      return false;
    if (Write && (InterveningRead || InterveningWrite))
      return false;
    if (StackPointer && InterveningStackPointer)
      return false;

    for (unsigned Reg : MutableRegisters)
      for (const MachineOperand &MO : I->operands())
        if (MO.isReg() && MO.isDef() && MO.getReg() == Reg)
          return false;
  }

  return true;
}
/// Test whether OneUse, a use of Reg, dominates all of Reg's other uses.
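/// For example (illustrative): when introducing a tee, the use that will pop
/// the teed value must execute before every other use of Reg, otherwise the
/// other uses could observe the wrong value; dominance (with a refinement for
/// stack evaluation order below) establishes that ordering.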
static bool OneUseDominatesOtherUses(unsigned Reg, const MachineOperand &OneUse,
                                     const MachineBasicBlock &MBB,
                                     const MachineRegisterInfo &MRI,
                                     const MachineDominatorTree &MDT,
                                     LiveIntervals &LIS,
                                     WebAssemblyFunctionInfo &MFI) {
  const LiveInterval &LI = LIS.getInterval(Reg);

  const MachineInstr *OneUseInst = OneUse.getParent();
  VNInfo *OneUseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*OneUseInst));

  for (const MachineOperand &Use : MRI.use_nodbg_operands(Reg)) {
    if (&Use == &OneUse)
      continue;

    const MachineInstr *UseInst = Use.getParent();
    VNInfo *UseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*UseInst));

    if (UseVNI != OneUseVNI)
      continue;

    if (UseInst == OneUseInst) {
      // Another use in the same instruction. We need to ensure that the one
      // selected use happens "before" it.
      if (&OneUse > &Use)
        return false;
    } else {
      // Test that the use is dominated by the one selected use.
      while (!MDT.dominates(OneUseInst, UseInst)) {
        // Actually, dominating is over-conservative. Test that the use would
        // happen after the one selected use in the stack evaluation order.
        //
        // This is needed as a consequence of using implicit get_locals for
        // uses and implicit set_locals for defs.
        if (UseInst->getDesc().getNumDefs() == 0)
          return false;
        const MachineOperand &MO = UseInst->getOperand(0);
        if (!MO.isReg())
          return false;
        unsigned DefReg = MO.getReg();
        if (!TargetRegisterInfo::isVirtualRegister(DefReg) ||
            !MFI.isVRegStackified(DefReg))
          return false;
        assert(MRI.hasOneUse(DefReg));
        const MachineOperand &NewUse = *MRI.use_begin(DefReg);
        const MachineInstr *NewUseInst = NewUse.getParent();
        if (NewUseInst == OneUseInst) {
          if (&OneUse > &NewUse)
            return false;
          break;
        }
        UseInst = NewUseInst;
      }
    }
  }
  return true;
}
/// Get the appropriate tee opcode for the given register class.
static unsigned GetTeeOpcode(const TargetRegisterClass *RC) {
  if (RC == &WebAssembly::I32RegClass)
    return WebAssembly::TEE_I32;
  if (RC == &WebAssembly::I64RegClass)
    return WebAssembly::TEE_I64;
  if (RC == &WebAssembly::F32RegClass)
    return WebAssembly::TEE_F32;
  if (RC == &WebAssembly::F64RegClass)
    return WebAssembly::TEE_F64;
  if (RC == &WebAssembly::V128RegClass)
    return WebAssembly::TEE_V128;
  llvm_unreachable("Unexpected register class");
}
// Shrink LI to its uses, cleaning up LI.
static void ShrinkToUses(LiveInterval &LI, LiveIntervals &LIS) {
  if (LIS.shrinkToUses(&LI)) {
    SmallVector<LiveInterval *, 4> SplitLIs;
    LIS.splitSeparateComponents(LI, SplitLIs);
  }
}
/// A single-use def in the same block with no intervening memory or register
/// dependencies; move the def down and nest it with the current instruction.
static MachineInstr *MoveForSingleUse(unsigned Reg, MachineOperand &Op,
                                      MachineInstr *Def,
                                      MachineBasicBlock &MBB,
                                      MachineInstr *Insert, LiveIntervals &LIS,
                                      WebAssemblyFunctionInfo &MFI,
                                      MachineRegisterInfo &MRI) {
  DEBUG(dbgs() << "Move for single use: "; Def->dump());

  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);

  if (MRI.hasOneDef(Reg) && MRI.hasOneUse(Reg)) {
    // No one else is using this register for anything so we can just stackify
    // it in place.
    MFI.stackifyVReg(Reg);
  } else {
    // The register may have unrelated uses or defs; create a new register for
    // just our one def and use so that we can stackify it.
    unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
    Def->getOperand(0).setReg(NewReg);
    Op.setReg(NewReg);

    // Tell LiveIntervals about the new register.
    LIS.createAndComputeVirtRegInterval(NewReg);

    // Tell LiveIntervals about the changes to the old register.
    LiveInterval &LI = LIS.getInterval(Reg);
    LI.removeSegment(LIS.getInstructionIndex(*Def).getRegSlot(),
                     LIS.getInstructionIndex(*Op.getParent()).getRegSlot(),
                     /*RemoveDeadValNo=*/true);

    MFI.stackifyVReg(NewReg);

    DEBUG(dbgs() << " - Replaced register: "; Def->dump());
  }

  ImposeStackOrdering(Def);
  return Def;
}
/// A trivially cloneable instruction; clone it and nest the new copy with the
/// current instruction.
static MachineInstr *RematerializeCheapDef(
    unsigned Reg, MachineOperand &Op, MachineInstr &Def, MachineBasicBlock &MBB,
    MachineBasicBlock::instr_iterator Insert, LiveIntervals &LIS,
    WebAssemblyFunctionInfo &MFI, MachineRegisterInfo &MRI,
    const WebAssemblyInstrInfo *TII, const WebAssemblyRegisterInfo *TRI) {
  DEBUG(dbgs() << "Rematerializing cheap def: "; Def.dump());
  DEBUG(dbgs() << " - for use in "; Op.getParent()->dump());

  unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
  TII->reMaterialize(MBB, Insert, NewReg, 0, Def, *TRI);
  Op.setReg(NewReg);
  MachineInstr *Clone = &*std::prev(Insert);
  LIS.InsertMachineInstrInMaps(*Clone);
  LIS.createAndComputeVirtRegInterval(NewReg);
  MFI.stackifyVReg(NewReg);
  ImposeStackOrdering(Clone);

  DEBUG(dbgs() << " - Cloned to "; Clone->dump());

  // Shrink the interval.
  bool IsDead = MRI.use_empty(Reg);
  if (!IsDead) {
    LiveInterval &LI = LIS.getInterval(Reg);
    ShrinkToUses(LI, LIS);
    IsDead = !LI.liveAt(LIS.getInstructionIndex(Def).getDeadSlot());
  }

  // If that was the last use of the original, delete the original.
  if (IsDead) {
    DEBUG(dbgs() << " - Deleting original\n");
    SlotIndex Idx = LIS.getInstructionIndex(Def).getRegSlot();
    LIS.removePhysRegDefAt(WebAssembly::ARGUMENTS, Idx);
    LIS.removeInterval(Reg);
    LIS.RemoveMachineInstrFromMaps(Def);
    Def.eraseFromParent();
  }

  return Clone;
}
/// A multiple-use def in the same block with no intervening memory or register
/// dependencies; move the def down, nest it with the current instruction, and
/// insert a tee to satisfy the rest of the uses. As an illustration, rewrite:
///
///    Reg = INST ...        // Def
///    INST ..., Reg, ...    // Insert
///    INST ..., Reg, ...
///    INST ..., Reg, ...
///
/// to this:
///
///    DefReg = INST ...     // Def (to become the new Insert)
///    TeeReg, Reg = TEE_... DefReg
///    INST ..., TeeReg, ... // Insert
///    INST ..., Reg, ...
///    INST ..., Reg, ...
///
/// with DefReg and TeeReg stackified. This eliminates a get_local from the
/// resulting code.
static MachineInstr *MoveAndTeeForMultiUse(
    unsigned Reg, MachineOperand &Op, MachineInstr *Def, MachineBasicBlock &MBB,
    MachineInstr *Insert, LiveIntervals &LIS, WebAssemblyFunctionInfo &MFI,
    MachineRegisterInfo &MRI, const WebAssemblyInstrInfo *TII) {
  DEBUG(dbgs() << "Move and tee for multi-use:"; Def->dump());

  // Move Def into place.
  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);

  // Create the Tee and attach the registers.
  const auto *RegClass = MRI.getRegClass(Reg);
  unsigned TeeReg = MRI.createVirtualRegister(RegClass);
  unsigned DefReg = MRI.createVirtualRegister(RegClass);
  MachineOperand &DefMO = Def->getOperand(0);
  MachineInstr *Tee = BuildMI(MBB, Insert, Insert->getDebugLoc(),
                              TII->get(GetTeeOpcode(RegClass)), TeeReg)
                          .addReg(Reg, RegState::Define)
                          .addReg(DefReg, getUndefRegState(DefMO.isDead()));
  Op.setReg(TeeReg);
  DefMO.setReg(DefReg);
  SlotIndex TeeIdx = LIS.InsertMachineInstrInMaps(*Tee).getRegSlot();
  SlotIndex DefIdx = LIS.getInstructionIndex(*Def).getRegSlot();

  // Tell LiveIntervals we moved the original vreg def from Def to Tee.
  LiveInterval &LI = LIS.getInterval(Reg);
  LiveInterval::iterator I = LI.FindSegmentContaining(DefIdx);
  VNInfo *ValNo = LI.getVNInfoAt(DefIdx);
  I->start = TeeIdx;
  ValNo->def = TeeIdx;
  ShrinkToUses(LI, LIS);

  // Finish stackifying the new regs.
  LIS.createAndComputeVirtRegInterval(TeeReg);
  LIS.createAndComputeVirtRegInterval(DefReg);
  MFI.stackifyVReg(DefReg);
  MFI.stackifyVReg(TeeReg);
  ImposeStackOrdering(Def);
  ImposeStackOrdering(Tee);

  DEBUG(dbgs() << " - Replaced register: "; Def->dump());
  DEBUG(dbgs() << " - Tee instruction: "; Tee->dump());
  return Def;
}
/// A stack for walking the tree of instructions being built, visiting the
/// MachineOperands in DFS order.
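/// For example (illustrative), for an instruction "ADD_I32 %a, %b" the walker
/// visits %b before %a, matching the LIFO order in which stackified operands
/// are popped from the value stack.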
class TreeWalkerState {
  typedef MachineInstr::mop_iterator mop_iterator;
  typedef std::reverse_iterator<mop_iterator> mop_reverse_iterator;
  typedef iterator_range<mop_reverse_iterator> RangeTy;
  SmallVector<RangeTy, 4> Worklist;

public:
  explicit TreeWalkerState(MachineInstr *Insert) {
    const iterator_range<mop_iterator> &Range = Insert->explicit_uses();
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  bool Done() const { return Worklist.empty(); }

  MachineOperand &Pop() {
    RangeTy &Range = Worklist.back();
    MachineOperand &Op = *Range.begin();
    Range = drop_begin(Range, 1);
    if (Range.begin() == Range.end())
      Worklist.pop_back();
    assert((Worklist.empty() ||
            Worklist.back().begin() != Worklist.back().end()) &&
           "Empty ranges shouldn't remain in the worklist");
    return Op;
  }

  /// Push Instr's operands onto the stack to be visited.
  void PushOperands(MachineInstr *Instr) {
    const iterator_range<mop_iterator> &Range(Instr->explicit_uses());
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  /// Some of Instr's operands are on the top of the stack; remove them and
  /// re-insert them starting from the beginning (because we've commuted them).
  void ResetTopOperands(MachineInstr *Instr) {
    assert(HasRemainingOperands(Instr) &&
           "Resetting operands should only be done when the instruction has "
           "an operand still on the stack");
    Worklist.back() = reverse(Instr->explicit_uses());
  }

  /// Test whether Instr has operands remaining to be visited at the top of
  /// the stack.
  bool HasRemainingOperands(const MachineInstr *Instr) const {
    if (Worklist.empty())
      return false;
    const RangeTy &Range = Worklist.back();
    return Range.begin() != Range.end() && Range.begin()->getParent() == Instr;
  }

  /// Test whether the given register is present on the stack, indicating an
  /// operand in the tree that we haven't visited yet. Moving a definition of
  /// Reg to a point in the tree after that would change its value.
  ///
  /// This is needed as a consequence of using implicit get_locals for
  /// uses and implicit set_locals for defs.
  bool IsOnStack(unsigned Reg) const {
    for (const RangeTy &Range : Worklist)
      for (const MachineOperand &MO : Range)
        if (MO.isReg() && MO.getReg() == Reg)
          return true;
    return false;
  }
};
/// State to keep track of whether commuting is in flight or whether it's been
/// tried for the current instruction and didn't work.
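/// For example (illustrative): if the second operand of a commutable
/// instruction such as ADD_I32 fails to stackify, the operands are tentatively
/// swapped so the other operand can be tried in the pop position; if that
/// fails too, the swap is reverted and commuting is declined for this
/// instruction.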
class CommutingState {
  /// There are effectively three states: the initial state where we haven't
  /// started commuting anything and we don't know anything yet, the tentative
  /// state where we've commuted the operands of the current instruction and
  /// are revisiting it, and the declined state where we've reverted the
  /// operands back to their original order and will no longer commute it
  /// further.
  bool TentativelyCommuting;
  bool Declined;

  /// During the tentative state, these hold the operand indices of the
  /// commuted operands.
  unsigned Operand0, Operand1;

public:
  CommutingState() : TentativelyCommuting(false), Declined(false) {}
  /// Stackification for an operand was not successful due to ordering
  /// constraints. If possible, and if we haven't already tried it and declined
  /// it, commute Insert's operands and prepare to revisit it.
  void MaybeCommute(MachineInstr *Insert, TreeWalkerState &TreeWalker,
                    const WebAssemblyInstrInfo *TII) {
    if (TentativelyCommuting) {
      assert(!Declined &&
             "Don't decline commuting until you've finished trying it");
      // Commuting didn't help. Revert it.
      TII->commuteInstruction(*Insert, /*NewMI=*/false, Operand0, Operand1);
      TentativelyCommuting = false;
      Declined = true;
    } else if (!Declined && TreeWalker.HasRemainingOperands(Insert)) {
      Operand0 = TargetInstrInfo::CommuteAnyOperandIndex;
      Operand1 = TargetInstrInfo::CommuteAnyOperandIndex;
      if (TII->findCommutedOpIndices(*Insert, Operand0, Operand1)) {
        // Tentatively commute the operands and try again.
        TII->commuteInstruction(*Insert, /*NewMI=*/false, Operand0, Operand1);
        TreeWalker.ResetTopOperands(Insert);
        TentativelyCommuting = true;
        Declined = false;
      }
    }
  }

  /// Stackification for some operand was successful. Reset to the default
  /// state.
  void Reset() {
    TentativelyCommuting = false;
    Declined = false;
  }
};
} // end anonymous namespace
bool WebAssemblyRegStackify::runOnMachineFunction(MachineFunction &MF) {
  DEBUG(dbgs() << "********** Register Stackifying **********\n"
                  "********** Function: "
               << MF.getName() << '\n');

  bool Changed = false;
  MachineRegisterInfo &MRI = MF.getRegInfo();
  WebAssemblyFunctionInfo &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();
  const auto *TII = MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  const auto *TRI = MF.getSubtarget<WebAssemblySubtarget>().getRegisterInfo();
  AliasAnalysis &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
  MachineDominatorTree &MDT = getAnalysis<MachineDominatorTree>();
  LiveIntervals &LIS = getAnalysis<LiveIntervals>();
  // Walk the instructions from the bottom up. Currently we don't look past
  // block boundaries, and the blocks aren't ordered so the block visitation
  // order isn't significant, but we may want to change this in the future.
  for (MachineBasicBlock &MBB : MF) {
    // Don't use a range-based for loop, because we modify the list as we're
    // iterating over it and the end iterator may change.
    for (auto MII = MBB.rbegin(); MII != MBB.rend(); ++MII) {
      MachineInstr *Insert = &*MII;
      // Don't nest anything inside an inline asm, because we don't have
      // constraints for $push inputs.
      if (Insert->getOpcode() == TargetOpcode::INLINEASM)
        continue;

      // Ignore debugging intrinsics.
      if (Insert->getOpcode() == TargetOpcode::DBG_VALUE)
        continue;

      // Iterate through the inputs in reverse order, since we'll be pulling
      // operands off the stack in LIFO order.
      CommutingState Commuting;
      TreeWalkerState TreeWalker(Insert);
      while (!TreeWalker.Done()) {
        MachineOperand &Op = TreeWalker.Pop();
        // We're only interested in explicit virtual register operands.
        if (!Op.isReg())
          continue;

        unsigned Reg = Op.getReg();
        assert(Op.isUse() && "explicit_uses() should only iterate over uses");
        assert(!Op.isImplicit() &&
               "explicit_uses() should only iterate over explicit operands");
        if (TargetRegisterInfo::isPhysicalRegister(Reg))
          continue;

        // Identify the definition for this register at this point.
        MachineInstr *Def = GetVRegDef(Reg, Insert, MRI, LIS);
        if (!Def)
          continue;

        // Don't nest an INLINE_ASM def into anything, because we don't have
        // constraints for $pop outputs.
        if (Def->getOpcode() == TargetOpcode::INLINEASM)
          continue;

        // Argument instructions represent live-in registers and not real
        // instructions.
        if (WebAssembly::isArgument(*Def))
          continue;
        // Decide which strategy to take. Prefer to move a single-use value
        // over cloning it, and prefer cloning over introducing a tee.
        // For moving, we require the def to be in the same block as the use;
        // this makes things simpler (LiveIntervals' handleMove function only
        // supports intra-block moves) and it's MachineSink's job to catch all
        // the sinking opportunities anyway.
        bool SameBlock = Def->getParent() == &MBB;
        bool CanMove = SameBlock && IsSafeToMove(Def, Insert, AA, MRI) &&
                       !TreeWalker.IsOnStack(Reg);
        if (CanMove && HasOneUse(Reg, Def, MRI, MDT, LIS)) {
          Insert = MoveForSingleUse(Reg, Op, Def, MBB, Insert, LIS, MFI, MRI);
        } else if (ShouldRematerialize(*Def, AA, TII)) {
          Insert =
              RematerializeCheapDef(Reg, Op, *Def, MBB, Insert->getIterator(),
                                    LIS, MFI, MRI, TII, TRI);
        } else if (CanMove &&
                   OneUseDominatesOtherUses(Reg, Op, MBB, MRI, MDT, LIS, MFI)) {
          Insert = MoveAndTeeForMultiUse(Reg, Op, Def, MBB, Insert, LIS, MFI,
                                         MRI, TII);
        } else {
          // We failed to stackify the operand. If the problem was ordering
          // constraints, Commuting may be able to help.
          if (!CanMove && SameBlock)
            Commuting.MaybeCommute(Insert, TreeWalker, TII);
          // Proceed to the next operand.
          continue;
        }
        // If the instruction we just stackified is an IMPLICIT_DEF, convert it
        // to a constant 0 so that the def is explicit, and the push/pop
        // correspondence is maintained.
        if (Insert->getOpcode() == TargetOpcode::IMPLICIT_DEF)
          ConvertImplicitDefToConstZero(Insert, MRI, TII, MF);

        // We stackified an operand. Add the defining instruction's operands to
        // the worklist stack now to continue to build an ever deeper tree.
        Commuting.Reset();
        TreeWalker.PushOperands(Insert);
      }

      // If we stackified any operands, skip over the tree to start looking for
      // the next instruction we can build a tree on.
      if (Insert != &*MII) {
        ImposeStackOrdering(&*MII);
        MII = MachineBasicBlock::iterator(Insert).getReverse();
        Changed = true;
      }
    }
  }
  // If we used VALUE_STACK anywhere, add it to the live-in sets everywhere so
  // that it never looks like a use-before-def.
  if (Changed) {
    MF.getRegInfo().addLiveIn(WebAssembly::VALUE_STACK);
    for (MachineBasicBlock &MBB : MF)
      MBB.addLiveIn(WebAssembly::VALUE_STACK);
  }
#ifndef NDEBUG
  // Verify that pushes and pops are performed in LIFO order.
  SmallVector<unsigned, 0> Stack;
  for (MachineBasicBlock &MBB : MF) {
    for (MachineInstr &MI : MBB) {
      if (MI.isDebugValue())
        continue;
      for (MachineOperand &MO : reverse(MI.explicit_operands())) {
        if (!MO.isReg())
          continue;
        unsigned Reg = MO.getReg();

        if (MFI.isVRegStackified(Reg)) {
          if (MO.isDef())
            Stack.push_back(Reg);
          else
            assert(Stack.pop_back_val() == Reg &&
                   "Register stack pop should be paired with a push");
        }
      }
    }
    // TODO: Generalize this code to support keeping values on the stack across
    // basic block boundaries.
    assert(Stack.empty() &&
           "Register stack pushes and pops should be balanced");
  }
#endif

  return Changed;
}