//===-- WebAssemblyRegStackify.cpp - Register Stackification --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// \brief This file implements a register stacking pass.
///
/// This pass reorders instructions to put register uses and defs in an order
/// such that they form single-use expression trees. Registers fitting this form
/// are then marked as "stackified", meaning references to them are replaced by
/// "push" and "pop" from the value stack.
///
/// This is primarily a code size optimization, since temporary values on the
/// value stack don't need to be named.
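///
/// For example, a def whose only use can be placed immediately after it can
/// be stackified, so the value flows directly through the value stack instead
/// of being written to and read back from a named local.
///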
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/WebAssemblyMCTargetDesc.h" // for WebAssembly::ARGUMENT_*
#include "WebAssembly.h"
#include "WebAssemblyMachineFunctionInfo.h"
#include "WebAssemblySubtarget.h"
#include "WebAssemblyUtilities.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfoImpls.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

#define DEBUG_TYPE "wasm-reg-stackify"

namespace {
class WebAssemblyRegStackify final : public MachineFunctionPass {
  StringRef getPassName() const override {
    return "WebAssembly Register Stackify";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<AAResultsWrapperPass>();
    AU.addRequired<MachineDominatorTree>();
    AU.addRequired<LiveIntervals>();
    AU.addPreserved<MachineBlockFrequencyInfo>();
    AU.addPreserved<SlotIndexes>();
    AU.addPreserved<LiveIntervals>();
    AU.addPreservedID(LiveVariablesID);
    AU.addPreserved<MachineDominatorTree>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

public:
  static char ID; // Pass identification, replacement for typeid
  WebAssemblyRegStackify() : MachineFunctionPass(ID) {}
};
} // end anonymous namespace
char WebAssemblyRegStackify::ID = 0;
FunctionPass *llvm::createWebAssemblyRegStackify() {
  return new WebAssemblyRegStackify();
}
// Decorate the given instruction with implicit operands that enforce the
// expression stack ordering constraints for an instruction which is on
// the expression stack.
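// The paired implicit def and use of VALUE_STACK create an artificial
// dependence chain between stackified instructions, so later passes can't
// reorder them in ways that would break the push/pop pairing.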
static void ImposeStackOrdering(MachineInstr *MI) {
  // Write the opaque VALUE_STACK register.
  if (!MI->definesRegister(WebAssembly::VALUE_STACK))
    MI->addOperand(MachineOperand::CreateReg(WebAssembly::VALUE_STACK,
                                             /*isDef=*/true,
                                             /*isImp=*/true));

  // Also read the opaque VALUE_STACK register.
  if (!MI->readsRegister(WebAssembly::VALUE_STACK))
    MI->addOperand(MachineOperand::CreateReg(WebAssembly::VALUE_STACK,
                                             /*isDef=*/false,
                                             /*isImp=*/true));
}
// Convert an IMPLICIT_DEF instruction into an instruction which defines
// a constant zero value.
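// An IMPLICIT_DEF produces no value to push, so giving it an explicit zero
// value keeps the push/pop correspondence on the value stack intact.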
static void ConvertImplicitDefToConstZero(MachineInstr *MI,
                                          MachineRegisterInfo &MRI,
                                          const TargetInstrInfo *TII,
                                          MachineFunction &MF) {
  assert(MI->getOpcode() == TargetOpcode::IMPLICIT_DEF);

  const auto *RegClass = MRI.getRegClass(MI->getOperand(0).getReg());
  if (RegClass == &WebAssembly::I32RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_I32));
    MI->addOperand(MachineOperand::CreateImm(0));
  } else if (RegClass == &WebAssembly::I64RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_I64));
    MI->addOperand(MachineOperand::CreateImm(0));
  } else if (RegClass == &WebAssembly::F32RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_F32));
    ConstantFP *Val = cast<ConstantFP>(Constant::getNullValue(
        Type::getFloatTy(MF.getFunction().getContext())));
    MI->addOperand(MachineOperand::CreateFPImm(Val));
  } else if (RegClass == &WebAssembly::F64RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_F64));
    ConstantFP *Val = cast<ConstantFP>(Constant::getNullValue(
        Type::getDoubleTy(MF.getFunction().getContext())));
    MI->addOperand(MachineOperand::CreateFPImm(Val));
  } else {
    llvm_unreachable("Unexpected reg class");
  }
}
// Determine whether a call to the callee referenced by
// MI->getOperand(CalleeOpNo) reads memory, writes memory, and/or has side
// effects.
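// When the callee is unknown or its attributes don't say otherwise, this
// conservatively reports that the call reads and writes memory and has side
// effects.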
static void QueryCallee(const MachineInstr &MI, unsigned CalleeOpNo, bool &Read,
                        bool &Write, bool &Effects, bool &StackPointer) {
  // All calls can use the stack pointer.
  StackPointer = true;

  const MachineOperand &MO = MI.getOperand(CalleeOpNo);
  if (MO.isGlobal()) {
    const Constant *GV = MO.getGlobal();
    if (const GlobalAlias *GA = dyn_cast<GlobalAlias>(GV))
      if (!GA->isInterposable())
        GV = GA->getAliasee();

    if (const Function *F = dyn_cast<Function>(GV)) {
      if (!F->doesNotThrow())
        Effects = true;
      if (F->doesNotAccessMemory())
        return;
      if (F->onlyReadsMemory()) {
        Read = true;
        return;
      }
    }
  }

  Read = true;
  Write = true;
  Effects = true;
}
// Determine whether MI reads memory, writes memory, has side effects,
// and/or uses the stack pointer value.
static void Query(const MachineInstr &MI, AliasAnalysis &AA, bool &Read,
                  bool &Write, bool &Effects, bool &StackPointer) {
  assert(!MI.isPosition());
  assert(!MI.isTerminator());

  if (MI.isDebugValue())
    return;

  // Check for loads.
  if (MI.mayLoad() && !MI.isDereferenceableInvariantLoad(&AA))
    Read = true;

  // Check for stores.
  if (MI.mayStore()) {
    Write = true;

    // Check for stores to __stack_pointer.
    for (auto MMO : MI.memoperands()) {
      const MachinePointerInfo &MPI = MMO->getPointerInfo();
      if (MPI.V.is<const PseudoSourceValue *>()) {
        auto PSV = MPI.V.get<const PseudoSourceValue *>();
        if (const ExternalSymbolPseudoSourceValue *EPSV =
                dyn_cast<ExternalSymbolPseudoSourceValue>(PSV))
          if (StringRef(EPSV->getSymbol()) == "__stack_pointer") {
            StackPointer = true;
          }
      }
    }
  } else if (MI.hasOrderedMemoryRef()) {
    switch (MI.getOpcode()) {
    case WebAssembly::DIV_S_I32: case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32: case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32: case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32: case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32: case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64: case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32: case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64: case WebAssembly::I64_TRUNC_U_F64:
      // These instructions have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid so they can't be arbitrarily
      // moved, however hasOrderedMemoryRef() interprets this plus their lack
      // of memoperands as having a potential unknown memory reference.
      break;
    default:
      // Record volatile accesses, unless it's a call, as calls are handled
      // specially below.
      if (!MI.isCall()) {
        Write = true;
        Effects = true;
      }
      break;
    }
  }

  // Check for side effects.
  if (MI.hasUnmodeledSideEffects()) {
    switch (MI.getOpcode()) {
    case WebAssembly::DIV_S_I32: case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32: case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32: case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32: case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32: case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64: case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32: case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64: case WebAssembly::I64_TRUNC_U_F64:
      // These instructions have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid so they can't be arbitrarily
      // moved, however in the specific case of register stackifying, it is safe
      // to move them because overflow and invalid are Undefined Behavior.
      break;
    default:
      Effects = true;
      break;
    }
  }

  // Analyze calls.
  if (MI.isCall()) {
    switch (MI.getOpcode()) {
    case WebAssembly::CALL_VOID:
    case WebAssembly::CALL_INDIRECT_VOID:
      QueryCallee(MI, 0, Read, Write, Effects, StackPointer);
      break;
    case WebAssembly::CALL_I32: case WebAssembly::CALL_I64:
    case WebAssembly::CALL_F32: case WebAssembly::CALL_F64:
    case WebAssembly::CALL_INDIRECT_I32: case WebAssembly::CALL_INDIRECT_I64:
    case WebAssembly::CALL_INDIRECT_F32: case WebAssembly::CALL_INDIRECT_F64:
      QueryCallee(MI, 1, Read, Write, Effects, StackPointer);
      break;
    default:
      llvm_unreachable("unexpected call opcode");
    }
  }
}
// Test whether Def is safe and profitable to rematerialize.
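// Rematerializing a def at its use duplicates the instruction but avoids a
// get_local of the original value, so it's only a win for defs that are as
// cheap as a move (e.g. constants).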
static bool ShouldRematerialize(const MachineInstr &Def, AliasAnalysis &AA,
                                const WebAssemblyInstrInfo *TII) {
  return Def.isAsCheapAsAMove() && TII->isTriviallyReMaterializable(Def, &AA);
}
// Identify the definition for this register at this point. This is a
// generalization of MachineRegisterInfo::getUniqueVRegDef that uses
// LiveIntervals to handle complex cases.
static MachineInstr *GetVRegDef(unsigned Reg, const MachineInstr *Insert,
                                const MachineRegisterInfo &MRI,
                                const LiveIntervals &LIS) {
  // Most registers are in SSA form here so we try a quick MRI query first.
  if (MachineInstr *Def = MRI.getUniqueVRegDef(Reg))
    return Def;

  // MRI doesn't know what the Def is. Try asking LIS.
  if (const VNInfo *ValNo = LIS.getInterval(Reg).getVNInfoBefore(
          LIS.getInstructionIndex(*Insert)))
    return LIS.getInstructionFromIndex(ValNo->def);

  return nullptr;
}
// Test whether Reg, as defined at Def, has exactly one use. This is a
// generalization of MachineRegisterInfo::hasOneUse that uses LiveIntervals
// to handle complex cases.
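// Here "one use" means the value defined at Def is read by exactly one
// operand and dies at that use, so no other code can observe it.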
static bool HasOneUse(unsigned Reg, MachineInstr *Def,
                      MachineRegisterInfo &MRI, MachineDominatorTree &MDT,
                      LiveIntervals &LIS) {
  // Most registers are in SSA form here so we try a quick MRI query first.
  if (MRI.hasOneUse(Reg))
    return true;

  bool HasOne = false;
  const LiveInterval &LI = LIS.getInterval(Reg);
  const VNInfo *DefVNI = LI.getVNInfoAt(
      LIS.getInstructionIndex(*Def).getRegSlot());
  assert(DefVNI);
  for (auto &I : MRI.use_nodbg_operands(Reg)) {
    const auto &Result = LI.Query(LIS.getInstructionIndex(*I.getParent()));
    if (Result.valueIn() == DefVNI) {
      if (!Result.isKill())
        return false;
      if (HasOne)
        return false;
      HasOne = true;
    }
  }
  return HasOne;
}
// Test whether it's safe to move Def to just before Insert.
// TODO: Compute memory dependencies in a way that doesn't require always
// walking the block.
// TODO: Compute memory dependencies in a way that uses AliasAnalysis to be
// more precise.
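// A move is rejected if an intervening instruction clobbers a register that
// Def reads, or if the memory/side-effect behavior of Def conflicts with that
// of any instruction between Def and Insert.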
static bool IsSafeToMove(const MachineInstr *Def, const MachineInstr *Insert,
                         AliasAnalysis &AA, const MachineRegisterInfo &MRI) {
  assert(Def->getParent() == Insert->getParent());

  // Check for register dependencies.
  SmallVector<unsigned, 4> MutableRegisters;
  for (const MachineOperand &MO : Def->operands()) {
    if (!MO.isReg() || MO.isUndef())
      continue;
    unsigned Reg = MO.getReg();

    // If the register is dead here and at Insert, ignore it.
    if (MO.isDead() && Insert->definesRegister(Reg) &&
        !Insert->readsRegister(Reg))
      continue;

    if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
      // Ignore ARGUMENTS; it's just used to keep the ARGUMENT_* instructions
      // from moving down, and we've already checked for that.
      if (Reg == WebAssembly::ARGUMENTS)
        continue;
      // If the physical register is never modified, ignore it.
      if (!MRI.isPhysRegModified(Reg))
        continue;
      // Otherwise, it's a physical register with unknown liveness.
      return false;
    }

    // If one of the operands isn't in SSA form, it has different values at
    // different times, and we need to make sure we don't move our use across
    // a different def.
    if (!MO.isDef() && !MRI.hasOneDef(Reg))
      MutableRegisters.push_back(Reg);
  }

  bool Read = false, Write = false, Effects = false, StackPointer = false;
  Query(*Def, AA, Read, Write, Effects, StackPointer);

  // If the instruction does not access memory and has no side effects, it has
  // no additional dependencies.
  bool HasMutableRegisters = !MutableRegisters.empty();
  if (!Read && !Write && !Effects && !StackPointer && !HasMutableRegisters)
    return true;

  // Scan through the intervening instructions between Def and Insert.
  MachineBasicBlock::const_iterator D(Def), I(Insert);
  for (--I; I != D; --I) {
    bool InterveningRead = false;
    bool InterveningWrite = false;
    bool InterveningEffects = false;
    bool InterveningStackPointer = false;
    Query(*I, AA, InterveningRead, InterveningWrite, InterveningEffects,
          InterveningStackPointer);
    if (Effects && InterveningEffects)
      return false;
    if (Read && InterveningWrite)
      return false;
    if (Write && (InterveningRead || InterveningWrite))
      return false;
    if (StackPointer && InterveningStackPointer)
      return false;

    for (unsigned Reg : MutableRegisters)
      for (const MachineOperand &MO : I->operands())
        if (MO.isReg() && MO.isDef() && MO.getReg() == Reg)
          return false;
  }

  return true;
}
/// Test whether OneUse, a use of Reg, dominates all of Reg's other uses.
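///
/// The tee transformation moves the def down to the one selected use, so it
/// is only correct if every other use of that value is evaluated after the
/// selected use; dominance, refined by stack evaluation order, checks that.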
static bool OneUseDominatesOtherUses(unsigned Reg, const MachineOperand &OneUse,
                                     const MachineBasicBlock &MBB,
                                     const MachineRegisterInfo &MRI,
                                     const MachineDominatorTree &MDT,
                                     LiveIntervals &LIS,
                                     WebAssemblyFunctionInfo &MFI) {
  const LiveInterval &LI = LIS.getInterval(Reg);

  const MachineInstr *OneUseInst = OneUse.getParent();
  VNInfo *OneUseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*OneUseInst));

  for (const MachineOperand &Use : MRI.use_nodbg_operands(Reg)) {
    if (&Use == &OneUse)
      continue;

    const MachineInstr *UseInst = Use.getParent();
    VNInfo *UseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*UseInst));

    if (UseVNI != OneUseVNI)
      continue;

    if (UseInst == OneUseInst) {
      // Another use in the same instruction. We need to ensure that the one
      // selected use happens "before" it.
      if (&OneUse > &Use)
        return false;
    } else {
      // Test that the use is dominated by the one selected use.
      while (!MDT.dominates(OneUseInst, UseInst)) {
        // Actually, dominating is over-conservative. Test that the use would
        // happen after the one selected use in the stack evaluation order.
        //
        // This is needed as a consequence of using implicit get_locals for
        // uses and implicit set_locals for defs.
        if (UseInst->getDesc().getNumDefs() == 0)
          return false;
        const MachineOperand &MO = UseInst->getOperand(0);
        if (!MO.isReg())
          return false;
        unsigned DefReg = MO.getReg();
        if (!TargetRegisterInfo::isVirtualRegister(DefReg) ||
            !MFI.isVRegStackified(DefReg))
          return false;
        assert(MRI.hasOneUse(DefReg));
        const MachineOperand &NewUse = *MRI.use_begin(DefReg);
        const MachineInstr *NewUseInst = NewUse.getParent();
        if (NewUseInst == OneUseInst) {
          if (&OneUse > &NewUse)
            return false;
          break;
        }
        UseInst = NewUseInst;
      }
    }
  }
  return true;
}
/// Get the appropriate tee opcode for the given register class.
static unsigned GetTeeOpcode(const TargetRegisterClass *RC) {
  if (RC == &WebAssembly::I32RegClass)
    return WebAssembly::TEE_I32;
  if (RC == &WebAssembly::I64RegClass)
    return WebAssembly::TEE_I64;
  if (RC == &WebAssembly::F32RegClass)
    return WebAssembly::TEE_F32;
  if (RC == &WebAssembly::F64RegClass)
    return WebAssembly::TEE_F64;
  if (RC == &WebAssembly::V128RegClass)
    return WebAssembly::TEE_V128;
  llvm_unreachable("Unexpected register class");
}
// Shrink LI to its uses, cleaning up LI.
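// If shrinking leaves the interval in disconnected pieces, the components are
// split off into separate intervals to keep the liveness information precise.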
static void ShrinkToUses(LiveInterval &LI, LiveIntervals &LIS) {
  if (LIS.shrinkToUses(&LI)) {
    SmallVector<LiveInterval *, 4> SplitLIs;
    LIS.splitSeparateComponents(LI, SplitLIs);
  }
}
/// A single-use def in the same block with no intervening memory or register
/// dependencies; move the def down and nest it with the current instruction.
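/// As an illustration, rewrite this:
///
///    Reg = INST ...        // Def
///    INST ..., Reg, ...    // Insert
///
/// so that Def is spliced to immediately before Insert and Reg is stackified,
/// with the value flowing through the value stack rather than a local.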
static MachineInstr *MoveForSingleUse(unsigned Reg, MachineOperand &Op,
                                      MachineInstr *Def,
                                      MachineBasicBlock &MBB,
                                      MachineInstr *Insert, LiveIntervals &LIS,
                                      WebAssemblyFunctionInfo &MFI,
                                      MachineRegisterInfo &MRI) {
  DEBUG(dbgs() << "Move for single use: "; Def->dump());

  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);

  if (MRI.hasOneDef(Reg) && MRI.hasOneUse(Reg)) {
    // No one else is using this register for anything so we can just stackify
    // it in place.
    MFI.stackifyVReg(Reg);
  } else {
    // The register may have unrelated uses or defs; create a new register for
    // just our one def and use so that we can stackify it.
    unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
    Def->getOperand(0).setReg(NewReg);
    Op.setReg(NewReg);

    // Tell LiveIntervals about the new register.
    LIS.createAndComputeVirtRegInterval(NewReg);

    // Tell LiveIntervals about the changes to the old register.
    LiveInterval &LI = LIS.getInterval(Reg);
    LI.removeSegment(LIS.getInstructionIndex(*Def).getRegSlot(),
                     LIS.getInstructionIndex(*Op.getParent()).getRegSlot(),
                     /*RemoveDeadValNo=*/true);

    MFI.stackifyVReg(NewReg);

    DEBUG(dbgs() << " - Replaced register: "; Def->dump());
  }

  ImposeStackOrdering(Def);
  return Def;
}
/// A trivially cloneable instruction; clone it and nest the new copy with the
/// current instruction.
static MachineInstr *RematerializeCheapDef(
    unsigned Reg, MachineOperand &Op, MachineInstr &Def, MachineBasicBlock &MBB,
    MachineBasicBlock::instr_iterator Insert, LiveIntervals &LIS,
    WebAssemblyFunctionInfo &MFI, MachineRegisterInfo &MRI,
    const WebAssemblyInstrInfo *TII, const WebAssemblyRegisterInfo *TRI) {
  DEBUG(dbgs() << "Rematerializing cheap def: "; Def.dump());
  DEBUG(dbgs() << " - for use in "; Op.getParent()->dump());

  unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
  TII->reMaterialize(MBB, Insert, NewReg, 0, Def, *TRI);
  Op.setReg(NewReg);
  MachineInstr *Clone = &*std::prev(Insert);
  LIS.InsertMachineInstrInMaps(*Clone);
  LIS.createAndComputeVirtRegInterval(NewReg);
  MFI.stackifyVReg(NewReg);
  ImposeStackOrdering(Clone);

  DEBUG(dbgs() << " - Cloned to "; Clone->dump());

  // Shrink the interval.
  bool IsDead = MRI.use_empty(Reg);
  if (!IsDead) {
    LiveInterval &LI = LIS.getInterval(Reg);
    ShrinkToUses(LI, LIS);
    IsDead = !LI.liveAt(LIS.getInstructionIndex(Def).getDeadSlot());
  }

  // If that was the last use of the original, delete the original.
  if (IsDead) {
    DEBUG(dbgs() << " - Deleting original\n");
    SlotIndex Idx = LIS.getInstructionIndex(Def).getRegSlot();
    LIS.removePhysRegDefAt(WebAssembly::ARGUMENTS, Idx);
    LIS.removeInterval(Reg);
    LIS.RemoveMachineInstrFromMaps(Def);
    Def.eraseFromParent();
  }

  return Clone;
}
/// A multiple-use def in the same block with no intervening memory or register
/// dependencies; move the def down, nest it with the current instruction, and
/// insert a tee to satisfy the rest of the uses. As an illustration, rewrite
/// this:
///
///    Reg = INST ...        // Def
///    INST ..., Reg, ...    // Insert
///    INST ..., Reg, ...
///    INST ..., Reg, ...
///
/// to this:
///
///    DefReg = INST ...     // Def (to become the new Insert)
///    TeeReg, Reg = TEE_... DefReg
///    INST ..., TeeReg, ... // Insert
///    INST ..., Reg, ...
///    INST ..., Reg, ...
///
/// with DefReg and TeeReg stackified. This eliminates a get_local from the
/// resulting code.
static MachineInstr *MoveAndTeeForMultiUse(
    unsigned Reg, MachineOperand &Op, MachineInstr *Def, MachineBasicBlock &MBB,
    MachineInstr *Insert, LiveIntervals &LIS, WebAssemblyFunctionInfo &MFI,
    MachineRegisterInfo &MRI, const WebAssemblyInstrInfo *TII) {
  DEBUG(dbgs() << "Move and tee for multi-use: "; Def->dump());

  // Move Def into place.
  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);

  // Create the Tee and attach the registers.
  const auto *RegClass = MRI.getRegClass(Reg);
  unsigned TeeReg = MRI.createVirtualRegister(RegClass);
  unsigned DefReg = MRI.createVirtualRegister(RegClass);
  MachineOperand &DefMO = Def->getOperand(0);
  MachineInstr *Tee = BuildMI(MBB, Insert, Insert->getDebugLoc(),
                              TII->get(GetTeeOpcode(RegClass)), TeeReg)
                          .addReg(Reg, RegState::Define)
                          .addReg(DefReg, getUndefRegState(DefMO.isDead()));
  Op.setReg(TeeReg);
  DefMO.setReg(DefReg);
  SlotIndex TeeIdx = LIS.InsertMachineInstrInMaps(*Tee).getRegSlot();
  SlotIndex DefIdx = LIS.getInstructionIndex(*Def).getRegSlot();

  // Tell LiveIntervals we moved the original vreg def from Def to Tee.
  LiveInterval &LI = LIS.getInterval(Reg);
  LiveInterval::iterator I = LI.FindSegmentContaining(DefIdx);
  VNInfo *ValNo = LI.getVNInfoAt(DefIdx);
  I->start = TeeIdx;
  ValNo->def = TeeIdx;
  ShrinkToUses(LI, LIS);

  // Finish stackifying the new regs.
  LIS.createAndComputeVirtRegInterval(TeeReg);
  LIS.createAndComputeVirtRegInterval(DefReg);
  MFI.stackifyVReg(DefReg);
  MFI.stackifyVReg(TeeReg);
  ImposeStackOrdering(Def);
  ImposeStackOrdering(Tee);

  DEBUG(dbgs() << " - Replaced register: "; Def->dump());
  DEBUG(dbgs() << " - Tee instruction: "; Tee->dump());
  return Def;
}
namespace {
/// A stack for walking the tree of instructions being built, visiting the
/// MachineOperands in DFS order.
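///
/// Operand ranges are pushed in reverse, so operands are visited
/// last-operand-first, matching the order in which their values will be
/// popped from the value stack.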
class TreeWalkerState {
  typedef MachineInstr::mop_iterator mop_iterator;
  typedef std::reverse_iterator<mop_iterator> mop_reverse_iterator;
  typedef iterator_range<mop_reverse_iterator> RangeTy;
  SmallVector<RangeTy, 4> Worklist;

public:
  explicit TreeWalkerState(MachineInstr *Insert) {
    const iterator_range<mop_iterator> &Range = Insert->explicit_uses();
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  bool Done() const { return Worklist.empty(); }

  MachineOperand &Pop() {
    RangeTy &Range = Worklist.back();
    MachineOperand &Op = *Range.begin();
    Range = drop_begin(Range, 1);
    if (Range.begin() == Range.end())
      Worklist.pop_back();
    assert((Worklist.empty() ||
            Worklist.back().begin() != Worklist.back().end()) &&
           "Empty ranges shouldn't remain in the worklist");
    return Op;
  }

  /// Push Instr's operands onto the stack to be visited.
  void PushOperands(MachineInstr *Instr) {
    const iterator_range<mop_iterator> &Range(Instr->explicit_uses());
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  /// Some of Instr's operands are on the top of the stack; remove them and
  /// re-insert them starting from the beginning (because we've commuted them).
  void ResetTopOperands(MachineInstr *Instr) {
    assert(HasRemainingOperands(Instr) &&
           "Resetting operands should only be done when the instruction has "
           "an operand still on the stack");
    Worklist.back() = reverse(Instr->explicit_uses());
  }

  /// Test whether Instr has operands remaining to be visited at the top of
  /// the stack.
  bool HasRemainingOperands(const MachineInstr *Instr) const {
    if (Worklist.empty())
      return false;
    const RangeTy &Range = Worklist.back();
    return Range.begin() != Range.end() && Range.begin()->getParent() == Instr;
  }

  /// Test whether the given register is present on the stack, indicating an
  /// operand in the tree that we haven't visited yet. Moving a definition of
  /// Reg to a point in the tree after that would change its value.
  ///
  /// This is needed as a consequence of using implicit get_locals for
  /// uses and implicit set_locals for defs.
  bool IsOnStack(unsigned Reg) const {
    for (const RangeTy &Range : Worklist)
      for (const MachineOperand &MO : Range)
        if (MO.isReg() && MO.getReg() == Reg)
          return true;
    return false;
  }
};
/// State to keep track of whether commuting is in flight or whether it's been
/// tried for the current instruction and didn't work.
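///
/// For example, if one operand of a commutable instruction can't be
/// stackified because of ordering constraints, swapping the operands and
/// revisiting the instruction may allow the other operand to be stackified.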
class CommutingState {
  /// There are effectively three states: the initial state where we haven't
  /// started commuting anything and we don't know anything yet, the tentative
  /// state where we've commuted the operands of the current instruction and are
  /// revisiting it, and the declined state where we've reverted the operands
  /// back to their original order and will no longer commute it further.
  bool TentativelyCommuting;
  bool Declined;

  /// During the tentative state, these hold the operand indices of the commuted
  /// operands.
  unsigned Operand0, Operand1;

public:
  CommutingState() : TentativelyCommuting(false), Declined(false) {}

  /// Stackification for an operand was not successful due to ordering
  /// constraints. If possible, and if we haven't already tried it and declined
  /// it, commute Insert's operands and prepare to revisit it.
  void MaybeCommute(MachineInstr *Insert, TreeWalkerState &TreeWalker,
                    const WebAssemblyInstrInfo *TII) {
    if (TentativelyCommuting) {
      assert(!Declined &&
             "Don't decline commuting until you've finished trying it");
      // Commuting didn't help. Revert it.
      TII->commuteInstruction(*Insert, /*NewMI=*/false, Operand0, Operand1);
      TentativelyCommuting = false;
      Declined = true;
    } else if (!Declined && TreeWalker.HasRemainingOperands(Insert)) {
      Operand0 = TargetInstrInfo::CommuteAnyOperandIndex;
      Operand1 = TargetInstrInfo::CommuteAnyOperandIndex;
      if (TII->findCommutedOpIndices(*Insert, Operand0, Operand1)) {
        // Tentatively commute the operands and try again.
        TII->commuteInstruction(*Insert, /*NewMI=*/false, Operand0, Operand1);
        TreeWalker.ResetTopOperands(Insert);
        TentativelyCommuting = true;
        Declined = false;
      }
    }
  }

  /// Stackification for some operand was successful. Reset to the default
  /// state.
  void Reset() {
    TentativelyCommuting = false;
    Declined = false;
  }
};
} // end anonymous namespace
bool WebAssemblyRegStackify::runOnMachineFunction(MachineFunction &MF) {
  DEBUG(dbgs() << "********** Register Stackifying **********\n"
                  "********** Function: "
               << MF.getName() << '\n');

  bool Changed = false;
  MachineRegisterInfo &MRI = MF.getRegInfo();
  WebAssemblyFunctionInfo &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();
  const auto *TII = MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  const auto *TRI = MF.getSubtarget<WebAssemblySubtarget>().getRegisterInfo();
  AliasAnalysis &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
  MachineDominatorTree &MDT = getAnalysis<MachineDominatorTree>();
  LiveIntervals &LIS = getAnalysis<LiveIntervals>();
  // Disable the TEE optimization if we aren't doing direct wasm object
  // emission, because lowering TEE to TEE_LOCAL is done in the ExplicitLocals
  // pass, which is also disabled.
  bool UseTee = true;
  if (MF.getSubtarget<WebAssemblySubtarget>()
          .getTargetTriple().isOSBinFormatELF())
    UseTee = false;
  // Walk the instructions from the bottom up. Currently we don't look past
  // block boundaries, and the blocks aren't ordered so the block visitation
  // order isn't significant, but we may want to change this in the future.
  for (MachineBasicBlock &MBB : MF) {
    // Don't use a range-based for loop, because we modify the list as we're
    // iterating over it and the end iterator may change.
    for (auto MII = MBB.rbegin(); MII != MBB.rend(); ++MII) {
      MachineInstr *Insert = &*MII;
      // Don't nest anything inside an inline asm, because we don't have
      // constraints for $push inputs.
      if (Insert->getOpcode() == TargetOpcode::INLINEASM)
        continue;

      // Ignore debugging intrinsics.
      if (Insert->getOpcode() == TargetOpcode::DBG_VALUE)
        continue;

      // Iterate through the inputs in reverse order, since we'll be pulling
      // operands off the stack in LIFO order.
      CommutingState Commuting;
      TreeWalkerState TreeWalker(Insert);
      while (!TreeWalker.Done()) {
        MachineOperand &Op = TreeWalker.Pop();

        // We're only interested in explicit virtual register operands.
        if (!Op.isReg())
          continue;

        unsigned Reg = Op.getReg();
        assert(Op.isUse() && "explicit_uses() should only iterate over uses");
        assert(!Op.isImplicit() &&
               "explicit_uses() should only iterate over explicit operands");
        if (TargetRegisterInfo::isPhysicalRegister(Reg))
          continue;

        // Identify the definition for this register at this point.
        MachineInstr *Def = GetVRegDef(Reg, Insert, MRI, LIS);
        if (!Def)
          continue;

        // Don't nest an INLINE_ASM def into anything, because we don't have
        // constraints for $pop outputs.
        if (Def->getOpcode() == TargetOpcode::INLINEASM)
          continue;

        // Argument instructions represent live-in registers and not real
        // instructions.
        if (WebAssembly::isArgument(*Def))
          continue;

        // Decide which strategy to take. Prefer to move a single-use value
        // over cloning it, and prefer cloning over introducing a tee.
        // For moving, we require the def to be in the same block as the use;
        // this makes things simpler (LiveIntervals' handleMove function only
        // supports intra-block moves) and it's MachineSink's job to catch all
        // the sinking opportunities anyway.
        bool SameBlock = Def->getParent() == &MBB;
        bool CanMove = SameBlock && IsSafeToMove(Def, Insert, AA, MRI) &&
                       !TreeWalker.IsOnStack(Reg);
        if (CanMove && HasOneUse(Reg, Def, MRI, MDT, LIS)) {
          Insert = MoveForSingleUse(Reg, Op, Def, MBB, Insert, LIS, MFI, MRI);
        } else if (ShouldRematerialize(*Def, AA, TII)) {
          Insert =
              RematerializeCheapDef(Reg, Op, *Def, MBB, Insert->getIterator(),
                                    LIS, MFI, MRI, TII, TRI);
        } else if (UseTee && CanMove &&
                   OneUseDominatesOtherUses(Reg, Op, MBB, MRI, MDT, LIS, MFI)) {
          Insert = MoveAndTeeForMultiUse(Reg, Op, Def, MBB, Insert, LIS, MFI,
                                         MRI, TII);
        } else {
          // We failed to stackify the operand. If the problem was ordering
          // constraints, Commuting may be able to help.
          if (!CanMove && SameBlock)
            Commuting.MaybeCommute(Insert, TreeWalker, TII);
          // Proceed to the next operand.
          continue;
        }

        // If the instruction we just stackified is an IMPLICIT_DEF, convert it
        // to a constant 0 so that the def is explicit, and the push/pop
        // correspondence is maintained.
        if (Insert->getOpcode() == TargetOpcode::IMPLICIT_DEF)
          ConvertImplicitDefToConstZero(Insert, MRI, TII, MF);

        // We stackified an operand. Add the defining instruction's operands to
        // the worklist stack now to continue to build an ever deeper tree.
        Commuting.Reset();
        TreeWalker.PushOperands(Insert);
      }

      // If we stackified any operands, skip over the tree to start looking for
      // the next instruction we can build a tree on.
      if (Insert != &*MII) {
        ImposeStackOrdering(&*MII);
        MII = MachineBasicBlock::iterator(Insert).getReverse();
        Changed = true;
      }
    }
  }
  // If we used VALUE_STACK anywhere, add it to the live-in sets everywhere so
  // that it never looks like a use-before-def.
  if (Changed) {
    MF.getRegInfo().addLiveIn(WebAssembly::VALUE_STACK);
    for (MachineBasicBlock &MBB : MF)
      MBB.addLiveIn(WebAssembly::VALUE_STACK);
  }
#ifndef NDEBUG
  // Verify that pushes and pops are performed in LIFO order.
  SmallVector<unsigned, 0> Stack;
  for (MachineBasicBlock &MBB : MF) {
    for (MachineInstr &MI : MBB) {
      if (MI.isDebugValue())
        continue;
      for (MachineOperand &MO : reverse(MI.explicit_operands())) {
        if (!MO.isReg())
          continue;
        unsigned Reg = MO.getReg();

        if (MFI.isVRegStackified(Reg)) {
          if (MO.isDef())
            Stack.push_back(Reg);
          else
            assert(Stack.pop_back_val() == Reg &&
                   "Register stack pop should be paired with a push");
        }
      }
    }
    // TODO: Generalize this code to support keeping values on the stack across
    // basic block boundaries.
    assert(Stack.empty() &&
           "Register stack pushes and pops should be balanced");
  }
#endif

  return Changed;
}