//===-- WebAssemblyRegStackify.cpp - Register Stackification --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements a register stacking pass.
///
/// This pass reorders instructions to put register uses and defs in an order
/// such that they form single-use expression trees. Registers fitting this form
/// are then marked as "stackified", meaning references to them are replaced by
/// "push" and "pop" from the value stack.
///
/// This is primarily a code size optimization, since temporary values on the
/// value stack don't need to be named.
///
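/// For example (an illustrative sketch, not taken verbatim from this pass's
/// output): a def such as "%1 = CONST_I32 42" with a single use
/// "%2 = ADD_I32 %0, %1" can be stackified so that the constant is pushed
/// immediately before the add pops it, and %1 never needs to be assigned a
/// named WebAssembly local.
///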
//===----------------------------------------------------------------------===//
23 #include "MCTargetDesc/WebAssemblyMCTargetDesc.h" // for WebAssembly::ARGUMENT_*
24 #include "WebAssembly.h"
25 #include "WebAssemblyDebugValueManager.h"
26 #include "WebAssemblyMachineFunctionInfo.h"
27 #include "WebAssemblySubtarget.h"
28 #include "WebAssemblyUtilities.h"
29 #include "llvm/ADT/SmallPtrSet.h"
30 #include "llvm/Analysis/AliasAnalysis.h"
31 #include "llvm/CodeGen/LiveIntervals.h"
32 #include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
33 #include "llvm/CodeGen/MachineDominators.h"
34 #include "llvm/CodeGen/MachineInstrBuilder.h"
35 #include "llvm/CodeGen/MachineModuleInfoImpls.h"
36 #include "llvm/CodeGen/MachineRegisterInfo.h"
37 #include "llvm/CodeGen/Passes.h"
38 #include "llvm/Support/Debug.h"
39 #include "llvm/Support/raw_ostream.h"
42 #define DEBUG_TYPE "wasm-reg-stackify"
class WebAssemblyRegStackify final : public MachineFunctionPass {
  StringRef getPassName() const override {
    return "WebAssembly Register Stackify";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<AAResultsWrapperPass>();
    AU.addRequired<MachineDominatorTree>();
    AU.addRequired<LiveIntervals>();
    AU.addPreserved<MachineBlockFrequencyInfo>();
    AU.addPreserved<SlotIndexes>();
    AU.addPreserved<LiveIntervals>();
    AU.addPreservedID(LiveVariablesID);
    AU.addPreserved<MachineDominatorTree>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

public:
  static char ID; // Pass identification, replacement for typeid
  WebAssemblyRegStackify() : MachineFunctionPass(ID) {}
};
} // end anonymous namespace

char WebAssemblyRegStackify::ID = 0;
INITIALIZE_PASS(WebAssemblyRegStackify, DEBUG_TYPE,
                "Reorder instructions to use the WebAssembly value stack",
                false, false)

FunctionPass *llvm::createWebAssemblyRegStackify() {
  return new WebAssemblyRegStackify();
}

// Decorate the given instruction with implicit operands that enforce the
// expression stack ordering constraints for an instruction which is on
// the expression stack.
static void ImposeStackOrdering(MachineInstr *MI) {
  // Write the opaque VALUE_STACK register.
  if (!MI->definesRegister(WebAssembly::VALUE_STACK))
    MI->addOperand(MachineOperand::CreateReg(WebAssembly::VALUE_STACK,
                                             /*isDef=*/true,
                                             /*isImp=*/true));

  // Also read the opaque VALUE_STACK register.
  if (!MI->readsRegister(WebAssembly::VALUE_STACK))
    MI->addOperand(MachineOperand::CreateReg(WebAssembly::VALUE_STACK,
                                             /*isDef=*/false,
                                             /*isImp=*/true));
}
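
// After decoration, a stackified instruction carries an extra implicit def and
// implicit use of the opaque VALUE_STACK register (illustrative rendering:
// "ADD_I32 ..., implicit-def $value_stack, implicit $value_stack"). The paired
// def/use creates an artificial dependence between stackified instructions, so
// later passes do not reorder them relative to each other.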

// Convert an IMPLICIT_DEF instruction into an instruction which defines
// a constant zero value.
static void ConvertImplicitDefToConstZero(MachineInstr *MI,
                                          MachineRegisterInfo &MRI,
                                          const TargetInstrInfo *TII,
                                          MachineFunction &MF,
                                          LiveIntervals &LIS) {
  assert(MI->getOpcode() == TargetOpcode::IMPLICIT_DEF);

  const auto *RegClass = MRI.getRegClass(MI->getOperand(0).getReg());
  if (RegClass == &WebAssembly::I32RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_I32));
    MI->addOperand(MachineOperand::CreateImm(0));
  } else if (RegClass == &WebAssembly::I64RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_I64));
    MI->addOperand(MachineOperand::CreateImm(0));
  } else if (RegClass == &WebAssembly::F32RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_F32));
    ConstantFP *Val = cast<ConstantFP>(Constant::getNullValue(
        Type::getFloatTy(MF.getFunction().getContext())));
    MI->addOperand(MachineOperand::CreateFPImm(Val));
  } else if (RegClass == &WebAssembly::F64RegClass) {
    MI->setDesc(TII->get(WebAssembly::CONST_F64));
    ConstantFP *Val = cast<ConstantFP>(Constant::getNullValue(
        Type::getDoubleTy(MF.getFunction().getContext())));
    MI->addOperand(MachineOperand::CreateFPImm(Val));
  } else if (RegClass == &WebAssembly::V128RegClass) {
    unsigned TempReg = MRI.createVirtualRegister(&WebAssembly::I32RegClass);
    MI->setDesc(TII->get(WebAssembly::SPLAT_v4i32));
    MI->addOperand(MachineOperand::CreateReg(TempReg, false));
    MachineInstr *Const = BuildMI(*MI->getParent(), MI, MI->getDebugLoc(),
                                  TII->get(WebAssembly::CONST_I32), TempReg)
                              .addImm(0);
    LIS.InsertMachineInstrInMaps(*Const);
  } else {
    llvm_unreachable("Unexpected reg class");
  }
}
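
// For example (illustrative): an "IMPLICIT_DEF %0:f32" becomes
// "%0:f32 = CONST_F32 0.0", so the value is produced by an explicit push and
// the push/pop pairing checked at the end of this pass stays balanced.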

// Determine whether a call to the callee referenced by
// MI->getOperand(CalleeOpNo) reads memory, writes memory, and/or has side
// effects.
static void QueryCallee(const MachineInstr &MI, unsigned CalleeOpNo, bool &Read,
                        bool &Write, bool &Effects, bool &StackPointer) {
  // All calls can use the stack pointer.
  StackPointer = true;

  const MachineOperand &MO = MI.getOperand(CalleeOpNo);
  if (MO.isGlobal()) {
    const Constant *GV = MO.getGlobal();
    if (const GlobalAlias *GA = dyn_cast<GlobalAlias>(GV))
      if (!GA->isInterposable())
        GV = GA->getAliasee();

    if (const Function *F = dyn_cast<Function>(GV)) {
      if (!F->doesNotThrow())
        Effects = true;
      if (F->doesNotAccessMemory())
        return;
      if (F->onlyReadsMemory()) {
        Read = true;
        return;
      }
    }
  }

  // Conservatively assume the worst for unknown callees.
  Read = true;
  Write = true;
  Effects = true;
}

// Determine whether MI reads memory, writes memory, has side effects,
// and/or uses the stack pointer value.
static void Query(const MachineInstr &MI, AliasAnalysis &AA, bool &Read,
                  bool &Write, bool &Effects, bool &StackPointer) {
  assert(!MI.isTerminator());

  if (MI.isDebugInstr() || MI.isPosition())
    return;

  // Check for loads.
  if (MI.mayLoad() && !MI.isDereferenceableInvariantLoad(&AA))
    Read = true;

  // Check for stores.
  if (MI.mayStore()) {
    Write = true;
  } else if (MI.hasOrderedMemoryRef()) {
    switch (MI.getOpcode()) {
    case WebAssembly::DIV_S_I32:
    case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32:
    case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32:
    case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32:
    case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32:
    case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64:
    case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32:
    case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64:
    case WebAssembly::I64_TRUNC_U_F64:
      // These instructions have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid so they can't be arbitrarily
      // moved, however hasOrderedMemoryRef() interprets this plus their lack
      // of memoperands as having a potential unknown memory reference.
      break;
    default:
      // Record volatile accesses, unless it's a call, as calls are handled
      // specially below.
      if (!MI.isCall()) {
        Write = true;
        Effects = true;
      }
      break;
    }
  }

  // Check for side effects.
  if (MI.hasUnmodeledSideEffects()) {
    switch (MI.getOpcode()) {
    case WebAssembly::DIV_S_I32:
    case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32:
    case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32:
    case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32:
    case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32:
    case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64:
    case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32:
    case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64:
    case WebAssembly::I64_TRUNC_U_F64:
      // These instructions have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid so they can't be arbitrarily
      // moved, however in the specific case of register stackifying, it is safe
      // to move them because overflow and invalid are Undefined Behavior.
      break;
    default:
      Effects = true;
      break;
    }
  }

  // Check for writes to __stack_pointer global.
  if (MI.getOpcode() == WebAssembly::GLOBAL_SET_I32 &&
      strcmp(MI.getOperand(0).getSymbolName(), "__stack_pointer") == 0)
    StackPointer = true;

  // Analyze calls.
  if (MI.isCall()) {
    unsigned CalleeOpNo = WebAssembly::getCalleeOpNo(MI);
    QueryCallee(MI, CalleeOpNo, Read, Write, Effects, StackPointer);
  }
}
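
// As a rough illustration of the flags above: a plain store sets Write; the
// DIV/REM/TRUNC opcodes listed in the switches set nothing extra, since their
// traps are treated as undefined behavior for reordering purposes; and a call
// to an unknown callee conservatively ends up with Read, Write, Effects, and
// StackPointer all set via QueryCallee.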

// Test whether Def is safe and profitable to rematerialize.
static bool ShouldRematerialize(const MachineInstr &Def, AliasAnalysis &AA,
                                const WebAssemblyInstrInfo *TII) {
  return Def.isAsCheapAsAMove() && TII->isTriviallyReMaterializable(Def, &AA);
}

// Identify the definition for this register at this point. This is a
// generalization of MachineRegisterInfo::getUniqueVRegDef that uses
// LiveIntervals to handle complex cases.
static MachineInstr *GetVRegDef(unsigned Reg, const MachineInstr *Insert,
                                const MachineRegisterInfo &MRI,
                                const LiveIntervals &LIS) {
  // Most registers are in SSA form here so we try a quick MRI query first.
  if (MachineInstr *Def = MRI.getUniqueVRegDef(Reg))
    return Def;

  // MRI doesn't know what the Def is. Try asking LIS.
  if (const VNInfo *ValNo = LIS.getInterval(Reg).getVNInfoBefore(
          LIS.getInstructionIndex(*Insert)))
    return LIS.getInstructionFromIndex(ValNo->def);

  return nullptr;
}

// Test whether Reg, as defined at Def, has exactly one use. This is a
// generalization of MachineRegisterInfo::hasOneUse that uses LiveIntervals
// to handle complex cases.
static bool HasOneUse(unsigned Reg, MachineInstr *Def, MachineRegisterInfo &MRI,
                      MachineDominatorTree &MDT, LiveIntervals &LIS) {
  // Most registers are in SSA form here so we try a quick MRI query first.
  if (MRI.hasOneUse(Reg))
    return true;

  bool HasOne = false;
  const LiveInterval &LI = LIS.getInterval(Reg);
  const VNInfo *DefVNI =
      LI.getVNInfoAt(LIS.getInstructionIndex(*Def).getRegSlot());
  assert(DefVNI);
  for (auto &I : MRI.use_nodbg_operands(Reg)) {
    const auto &Result = LI.Query(LIS.getInstructionIndex(*I.getParent()));
    if (Result.valueIn() == DefVNI) {
      if (!Result.isKill())
        return false;
      if (HasOne)
        return false;
      HasOne = true;
    }
  }
  return HasOne;
}

// Test whether it's safe to move Def to just before Insert.
// TODO: Compute memory dependencies in a way that doesn't require always
// walking the block.
// TODO: Compute memory dependencies in a way that uses AliasAnalysis to be
// more precise.
static bool IsSafeToMove(const MachineInstr *Def, const MachineInstr *Insert,
                         AliasAnalysis &AA, const MachineRegisterInfo &MRI) {
  assert(Def->getParent() == Insert->getParent());

  // Check for register dependencies.
  SmallVector<unsigned, 4> MutableRegisters;
  for (const MachineOperand &MO : Def->operands()) {
    if (!MO.isReg() || MO.isUndef())
      continue;
    unsigned Reg = MO.getReg();

    // If the register is dead here and at Insert, ignore it.
    if (MO.isDead() && Insert->definesRegister(Reg) &&
        !Insert->readsRegister(Reg))
      continue;

    if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
      // Ignore ARGUMENTS; it's just used to keep the ARGUMENT_* instructions
      // from moving down, and we've already checked for that.
      if (Reg == WebAssembly::ARGUMENTS)
        continue;
      // If the physical register is never modified, ignore it.
      if (!MRI.isPhysRegModified(Reg))
        continue;
      // Otherwise, it's a physical register with unknown liveness.
      return false;
    }

    // If one of the operands isn't in SSA form, it has different values at
    // different times, and we need to make sure we don't move our use across
    // a different def.
    if (!MO.isDef() && !MRI.hasOneDef(Reg))
      MutableRegisters.push_back(Reg);
  }

  bool Read = false, Write = false, Effects = false, StackPointer = false;
  Query(*Def, AA, Read, Write, Effects, StackPointer);

  // If the instruction does not access memory and has no side effects, it has
  // no additional dependencies.
  bool HasMutableRegisters = !MutableRegisters.empty();
  if (!Read && !Write && !Effects && !StackPointer && !HasMutableRegisters)
    return true;

  // Scan through the intervening instructions between Def and Insert.
  MachineBasicBlock::const_iterator D(Def), I(Insert);
  for (--I; I != D; --I) {
    bool InterveningRead = false;
    bool InterveningWrite = false;
    bool InterveningEffects = false;
    bool InterveningStackPointer = false;
    Query(*I, AA, InterveningRead, InterveningWrite, InterveningEffects,
          InterveningStackPointer);
    if (Effects && InterveningEffects)
      return false;
    if (Read && InterveningWrite)
      return false;
    if (Write && (InterveningRead || InterveningWrite))
      return false;
    if (StackPointer && InterveningStackPointer)
      return false;

    for (unsigned Reg : MutableRegisters)
      for (const MachineOperand &MO : I->operands())
        if (MO.isReg() && MO.isDef() && MO.getReg() == Reg)
          return false;
  }

  return true;
}
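
// For example (illustrative): if Def reads memory and any instruction between
// Def and Insert writes memory, the move is rejected; likewise two
// instructions with unmodeled side effects are never reordered past each
// other.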

/// Test whether OneUse, a use of Reg, dominates all of Reg's other uses.
static bool OneUseDominatesOtherUses(unsigned Reg, const MachineOperand &OneUse,
                                     const MachineBasicBlock &MBB,
                                     const MachineRegisterInfo &MRI,
                                     const MachineDominatorTree &MDT,
                                     LiveIntervals &LIS,
                                     WebAssemblyFunctionInfo &MFI) {
  const LiveInterval &LI = LIS.getInterval(Reg);

  const MachineInstr *OneUseInst = OneUse.getParent();
  VNInfo *OneUseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*OneUseInst));

  for (const MachineOperand &Use : MRI.use_nodbg_operands(Reg)) {
    if (&Use == &OneUse)
      continue;

    const MachineInstr *UseInst = Use.getParent();
    VNInfo *UseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*UseInst));

    if (UseVNI != OneUseVNI)
      continue;

    if (UseInst == OneUseInst) {
      // Another use in the same instruction. We need to ensure that the one
      // selected use happens "before" it.
      if (&OneUse > &Use)
        return false;
    } else {
      // Test that the use is dominated by the one selected use.
      while (!MDT.dominates(OneUseInst, UseInst)) {
        // Actually, dominating is over-conservative. Test that the use would
        // happen after the one selected use in the stack evaluation order.
        //
        // This is needed as a consequence of using implicit local.gets for
        // uses and implicit local.sets for defs.
        if (UseInst->getDesc().getNumDefs() == 0)
          return false;
        const MachineOperand &MO = UseInst->getOperand(0);
        if (!MO.isReg())
          return false;
        unsigned DefReg = MO.getReg();
        if (!TargetRegisterInfo::isVirtualRegister(DefReg) ||
            !MFI.isVRegStackified(DefReg))
          return false;
        assert(MRI.hasOneNonDBGUse(DefReg));
        const MachineOperand &NewUse = *MRI.use_nodbg_begin(DefReg);
        const MachineInstr *NewUseInst = NewUse.getParent();
        if (NewUseInst == OneUseInst) {
          if (&OneUse > &NewUse)
            return false;
          break;
        }
        UseInst = NewUseInst;
      }
    }
  }
  return true;
}

/// Get the appropriate tee opcode for the given register class.
static unsigned GetTeeOpcode(const TargetRegisterClass *RC) {
  if (RC == &WebAssembly::I32RegClass)
    return WebAssembly::TEE_I32;
  if (RC == &WebAssembly::I64RegClass)
    return WebAssembly::TEE_I64;
  if (RC == &WebAssembly::F32RegClass)
    return WebAssembly::TEE_F32;
  if (RC == &WebAssembly::F64RegClass)
    return WebAssembly::TEE_F64;
  if (RC == &WebAssembly::V128RegClass)
    return WebAssembly::TEE_V128;
  llvm_unreachable("Unexpected register class");
}

// Shrink LI to its uses, cleaning up LI.
static void ShrinkToUses(LiveInterval &LI, LiveIntervals &LIS) {
  if (LIS.shrinkToUses(&LI)) {
    SmallVector<LiveInterval *, 4> SplitLIs;
    LIS.splitSeparateComponents(LI, SplitLIs);
  }
}

/// A single-use def in the same block with no intervening memory or register
/// dependencies; move the def down and nest it with the current instruction.
static MachineInstr *MoveForSingleUse(unsigned Reg, MachineOperand &Op,
                                      MachineInstr *Def, MachineBasicBlock &MBB,
                                      MachineInstr *Insert, LiveIntervals &LIS,
                                      WebAssemblyFunctionInfo &MFI,
                                      MachineRegisterInfo &MRI) {
  LLVM_DEBUG(dbgs() << "Move for single use: "; Def->dump());

  WebAssemblyDebugValueManager DefDIs(Def);
  MBB.splice(Insert, &MBB, Def);
  DefDIs.move(Insert);
  LIS.handleMove(*Def);

  if (MRI.hasOneDef(Reg) && MRI.hasOneUse(Reg)) {
    // No one else is using this register for anything so we can just stackify
    // it in place.
    MFI.stackifyVReg(Reg);
  } else {
    // The register may have unrelated uses or defs; create a new register for
    // just our one def and use so that we can stackify it.
    unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
    Def->getOperand(0).setReg(NewReg);
    Op.setReg(NewReg);

    // Tell LiveIntervals about the new register.
    LIS.createAndComputeVirtRegInterval(NewReg);

    // Tell LiveIntervals about the changes to the old register.
    LiveInterval &LI = LIS.getInterval(Reg);
    LI.removeSegment(LIS.getInstructionIndex(*Def).getRegSlot(),
                     LIS.getInstructionIndex(*Op.getParent()).getRegSlot(),
                     /*RemoveDeadValNo=*/true);

    MFI.stackifyVReg(NewReg);

    DefDIs.updateReg(NewReg);

    LLVM_DEBUG(dbgs() << " - Replaced register: "; Def->dump());
  }

  ImposeStackOrdering(Def);
  return Def;
}
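
// For example (illustrative): with a single-use constant,
//   %1 = CONST_I32 5
//   ...
//   %2 = ADD_I32 %0, %1   ; Insert
// the CONST_I32 is spliced down to immediately precede the add, and %1 (or a
// fresh register, if %1 has unrelated defs or uses) is marked stackified.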

/// A trivially cloneable instruction; clone it and nest the new copy with the
/// current instruction.
static MachineInstr *RematerializeCheapDef(
    unsigned Reg, MachineOperand &Op, MachineInstr &Def, MachineBasicBlock &MBB,
    MachineBasicBlock::instr_iterator Insert, LiveIntervals &LIS,
    WebAssemblyFunctionInfo &MFI, MachineRegisterInfo &MRI,
    const WebAssemblyInstrInfo *TII, const WebAssemblyRegisterInfo *TRI) {
  LLVM_DEBUG(dbgs() << "Rematerializing cheap def: "; Def.dump());
  LLVM_DEBUG(dbgs() << " - for use in "; Op.getParent()->dump());

  WebAssemblyDebugValueManager DefDIs(&Def);

  unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
  TII->reMaterialize(MBB, Insert, NewReg, 0, Def, *TRI);
  Op.setReg(NewReg);
  MachineInstr *Clone = &*std::prev(Insert);
  LIS.InsertMachineInstrInMaps(*Clone);
  LIS.createAndComputeVirtRegInterval(NewReg);
  MFI.stackifyVReg(NewReg);
  ImposeStackOrdering(Clone);

  LLVM_DEBUG(dbgs() << " - Cloned to "; Clone->dump());

  // Shrink the interval.
  bool IsDead = MRI.use_empty(Reg);
  if (!IsDead) {
    LiveInterval &LI = LIS.getInterval(Reg);
    ShrinkToUses(LI, LIS);
    IsDead = !LI.liveAt(LIS.getInstructionIndex(Def).getDeadSlot());
  }

  // If that was the last use of the original, delete the original.
  // Move or clone corresponding DBG_VALUEs to the 'Insert' location.
  if (IsDead) {
    LLVM_DEBUG(dbgs() << " - Deleting original\n");
    SlotIndex Idx = LIS.getInstructionIndex(Def).getRegSlot();
    LIS.removePhysRegDefAt(WebAssembly::ARGUMENTS, Idx);
    LIS.removeInterval(Reg);
    LIS.RemoveMachineInstrFromMaps(Def);
    Def.eraseFromParent();

    DefDIs.move(&*Insert);
    DefDIs.updateReg(NewReg);
  } else {
    DefDIs.clone(&*Insert, NewReg);
  }

  return Clone;
}
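
// For example (illustrative): a CONST_I32 needed by two separate expression
// trees is cheaper to clone than to keep in a named local; each clone gets its
// own stackified register, and the original def is deleted once it has no
// remaining uses.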

/// A multiple-use def in the same block with no intervening memory or register
/// dependencies; move the def down, nest it with the current instruction, and
/// insert a tee to satisfy the rest of the uses. As an illustration, rewrite
/// this:
///
///    Reg = INST ...        // Def
///    INST ..., Reg, ...    // Insert
///    INST ..., Reg, ...
///    INST ..., Reg, ...
///
/// to this:
///
///    DefReg = INST ...     // Def (to become the new Insert)
///    TeeReg, Reg = TEE_... DefReg
///    INST ..., TeeReg, ... // Insert
///    INST ..., Reg, ...
///    INST ..., Reg, ...
///
/// with DefReg and TeeReg stackified. This eliminates a local.get from the
/// resulting code.
static MachineInstr *MoveAndTeeForMultiUse(
    unsigned Reg, MachineOperand &Op, MachineInstr *Def, MachineBasicBlock &MBB,
    MachineInstr *Insert, LiveIntervals &LIS, WebAssemblyFunctionInfo &MFI,
    MachineRegisterInfo &MRI, const WebAssemblyInstrInfo *TII) {
  LLVM_DEBUG(dbgs() << "Move and tee for multi-use:"; Def->dump());

  WebAssemblyDebugValueManager DefDIs(Def);

  // Move Def into place.
  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);

  // Create the Tee and attach the registers.
  const auto *RegClass = MRI.getRegClass(Reg);
  unsigned TeeReg = MRI.createVirtualRegister(RegClass);
  unsigned DefReg = MRI.createVirtualRegister(RegClass);
  MachineOperand &DefMO = Def->getOperand(0);
  MachineInstr *Tee = BuildMI(MBB, Insert, Insert->getDebugLoc(),
                              TII->get(GetTeeOpcode(RegClass)), TeeReg)
                          .addReg(Reg, RegState::Define)
                          .addReg(DefReg, getUndefRegState(DefMO.isDead()));
  Op.setReg(TeeReg);
  DefMO.setReg(DefReg);
  SlotIndex TeeIdx = LIS.InsertMachineInstrInMaps(*Tee).getRegSlot();
  SlotIndex DefIdx = LIS.getInstructionIndex(*Def).getRegSlot();

  DefDIs.move(Insert);

  // Tell LiveIntervals we moved the original vreg def from Def to Tee.
  LiveInterval &LI = LIS.getInterval(Reg);
  LiveInterval::iterator I = LI.FindSegmentContaining(DefIdx);
  VNInfo *ValNo = LI.getVNInfoAt(DefIdx);
  I->start = TeeIdx;
  ValNo->def = TeeIdx;
  ShrinkToUses(LI, LIS);

  // Finish stackifying the new regs.
  LIS.createAndComputeVirtRegInterval(TeeReg);
  LIS.createAndComputeVirtRegInterval(DefReg);
  MFI.stackifyVReg(DefReg);
  MFI.stackifyVReg(TeeReg);
  ImposeStackOrdering(Def);
  ImposeStackOrdering(Tee);

  DefDIs.clone(Tee, DefReg);
  DefDIs.clone(Insert, TeeReg);

  LLVM_DEBUG(dbgs() << " - Replaced register: "; Def->dump());
  LLVM_DEBUG(dbgs() << " - Tee instruction: "; Tee->dump());
  return Tee;
}

namespace {
/// A stack for walking the tree of instructions being built, visiting the
/// MachineOperands in DFS order.
class TreeWalkerState {
  typedef MachineInstr::mop_iterator mop_iterator;
  typedef std::reverse_iterator<mop_iterator> mop_reverse_iterator;
  typedef iterator_range<mop_reverse_iterator> RangeTy;
  SmallVector<RangeTy, 4> Worklist;

public:
  explicit TreeWalkerState(MachineInstr *Insert) {
    const iterator_range<mop_iterator> &Range = Insert->explicit_uses();
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  bool Done() const { return Worklist.empty(); }

  MachineOperand &Pop() {
    RangeTy &Range = Worklist.back();
    MachineOperand &Op = *Range.begin();
    Range = drop_begin(Range, 1);
    if (Range.begin() == Range.end())
      Worklist.pop_back();
    assert((Worklist.empty() ||
            Worklist.back().begin() != Worklist.back().end()) &&
           "Empty ranges shouldn't remain in the worklist");
    return Op;
  }

  /// Push Instr's operands onto the stack to be visited.
  void PushOperands(MachineInstr *Instr) {
    const iterator_range<mop_iterator> &Range(Instr->explicit_uses());
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  /// Some of Instr's operands are on the top of the stack; remove them and
  /// re-insert them starting from the beginning (because we've commuted them).
  void ResetTopOperands(MachineInstr *Instr) {
    assert(HasRemainingOperands(Instr) &&
           "Resetting operands should only be done when the instruction has "
           "an operand still on the stack");
    Worklist.back() = reverse(Instr->explicit_uses());
  }

  /// Test whether Instr has operands remaining to be visited at the top of
  /// the stack.
  bool HasRemainingOperands(const MachineInstr *Instr) const {
    if (Worklist.empty())
      return false;
    const RangeTy &Range = Worklist.back();
    return Range.begin() != Range.end() && Range.begin()->getParent() == Instr;
  }

  /// Test whether the given register is present on the stack, indicating an
  /// operand in the tree that we haven't visited yet. Moving a definition of
  /// Reg to a point in the tree after that would change its value.
  ///
  /// This is needed as a consequence of using implicit local.gets for
  /// uses and implicit local.sets for defs.
  bool IsOnStack(unsigned Reg) const {
    for (const RangeTy &Range : Worklist)
      for (const MachineOperand &MO : Range)
        if (MO.isReg() && MO.getReg() == Reg)
          return true;
    return false;
  }
};

/// State to keep track of whether commuting is in flight or whether it's been
/// tried for the current instruction and didn't work.
class CommutingState {
  /// There are effectively three states: the initial state where we haven't
  /// started commuting anything and we don't know anything yet, the tentative
  /// state where we've commuted the operands of the current instruction and are
  /// revisiting it, and the declined state where we've reverted the operands
  /// back to their original order and will no longer commute it further.
  bool TentativelyCommuting;
  bool Declined;

  /// During the tentative state, these hold the operand indices of the commuted
  /// operands.
  unsigned Operand0, Operand1;

public:
  CommutingState() : TentativelyCommuting(false), Declined(false) {}

  /// Stackification for an operand was not successful due to ordering
  /// constraints. If possible, and if we haven't already tried it and declined
  /// it, commute Insert's operands and prepare to revisit it.
  void MaybeCommute(MachineInstr *Insert, TreeWalkerState &TreeWalker,
                    const WebAssemblyInstrInfo *TII) {
    if (TentativelyCommuting) {
      assert(!Declined &&
             "Don't decline commuting until you've finished trying it");
      // Commuting didn't help. Revert it.
      TII->commuteInstruction(*Insert, /*NewMI=*/false, Operand0, Operand1);
      TentativelyCommuting = false;
      Declined = true;
    } else if (!Declined && TreeWalker.HasRemainingOperands(Insert)) {
      Operand0 = TargetInstrInfo::CommuteAnyOperandIndex;
      Operand1 = TargetInstrInfo::CommuteAnyOperandIndex;
      if (TII->findCommutedOpIndices(*Insert, Operand0, Operand1)) {
        // Tentatively commute the operands and try again.
        TII->commuteInstruction(*Insert, /*NewMI=*/false, Operand0, Operand1);
        TreeWalker.ResetTopOperands(Insert);
        TentativelyCommuting = true;
      }
    }
  }

  /// Stackification for some operand was successful. Reset to the default
  /// state.
  void Reset() {
    TentativelyCommuting = false;
    Declined = false;
  }
};
} // end anonymous namespace
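
// For example (illustrative): when stackifying one operand of a commutative
// instruction fails due to ordering constraints, tentatively swapping the two
// operands and revisiting them can allow the other operand to be nested
// instead; if that still doesn't help, the swap is reverted and commuting is
// declined for this instruction.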

bool WebAssemblyRegStackify::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** Register Stackifying **********\n"
                       "********** Function: "
                    << MF.getName() << '\n');

  bool Changed = false;
  MachineRegisterInfo &MRI = MF.getRegInfo();
  WebAssemblyFunctionInfo &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();
  const auto *TII = MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  const auto *TRI = MF.getSubtarget<WebAssemblySubtarget>().getRegisterInfo();
  AliasAnalysis &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
  MachineDominatorTree &MDT = getAnalysis<MachineDominatorTree>();
  LiveIntervals &LIS = getAnalysis<LiveIntervals>();

  // Walk the instructions from the bottom up. Currently we don't look past
  // block boundaries, and the blocks aren't ordered so the block visitation
  // order isn't significant, but we may want to change this in the future.
  for (MachineBasicBlock &MBB : MF) {
    // Don't use a range-based for loop, because we modify the list as we're
    // iterating over it and the end iterator may change.
    for (auto MII = MBB.rbegin(); MII != MBB.rend(); ++MII) {
      MachineInstr *Insert = &*MII;
      // Don't nest anything inside an inline asm, because we don't have
      // constraints for $push inputs.
      if (Insert->getOpcode() == TargetOpcode::INLINEASM)
        continue;

      // Ignore debugging intrinsics.
      if (Insert->getOpcode() == TargetOpcode::DBG_VALUE)
        continue;

      // Iterate through the inputs in reverse order, since we'll be pulling
      // operands off the stack in LIFO order.
      CommutingState Commuting;
      TreeWalkerState TreeWalker(Insert);
      while (!TreeWalker.Done()) {
        MachineOperand &Op = TreeWalker.Pop();

        // We're only interested in explicit virtual register operands.
        if (!Op.isReg())
          continue;

        unsigned Reg = Op.getReg();
        assert(Op.isUse() && "explicit_uses() should only iterate over uses");
        assert(!Op.isImplicit() &&
               "explicit_uses() should only iterate over explicit operands");
        if (TargetRegisterInfo::isPhysicalRegister(Reg))
          continue;

        // Identify the definition for this register at this point.
        MachineInstr *Def = GetVRegDef(Reg, Insert, MRI, LIS);
        if (!Def)
          continue;

        // Don't nest an INLINE_ASM def into anything, because we don't have
        // constraints for $pop outputs.
        if (Def->getOpcode() == TargetOpcode::INLINEASM)
          continue;

        // Argument instructions represent live-in registers and not real
        // instructions.
        if (WebAssembly::isArgument(*Def))
          continue;

        // Decide which strategy to take. Prefer to move a single-use value
        // over cloning it, and prefer cloning over introducing a tee.
        // For moving, we require the def to be in the same block as the use;
        // this makes things simpler (LiveIntervals' handleMove function only
        // supports intra-block moves) and it's MachineSink's job to catch all
        // the sinking opportunities anyway.
        bool SameBlock = Def->getParent() == &MBB;
        bool CanMove = SameBlock && IsSafeToMove(Def, Insert, AA, MRI) &&
                       !TreeWalker.IsOnStack(Reg);
        if (CanMove && HasOneUse(Reg, Def, MRI, MDT, LIS)) {
          Insert = MoveForSingleUse(Reg, Op, Def, MBB, Insert, LIS, MFI, MRI);
        } else if (ShouldRematerialize(*Def, AA, TII)) {
          Insert =
              RematerializeCheapDef(Reg, Op, *Def, MBB, Insert->getIterator(),
                                    LIS, MFI, MRI, TII, TRI);
        } else if (CanMove &&
                   OneUseDominatesOtherUses(Reg, Op, MBB, MRI, MDT, LIS, MFI)) {
          Insert = MoveAndTeeForMultiUse(Reg, Op, Def, MBB, Insert, LIS, MFI,
                                         MRI, TII);
        } else {
          // We failed to stackify the operand. If the problem was ordering
          // constraints, Commuting may be able to help.
          if (!CanMove && SameBlock)
            Commuting.MaybeCommute(Insert, TreeWalker, TII);
          // Proceed to the next operand.
          continue;
        }

        // If the instruction we just stackified is an IMPLICIT_DEF, convert it
        // to a constant 0 so that the def is explicit, and the push/pop
        // correspondence is maintained.
        if (Insert->getOpcode() == TargetOpcode::IMPLICIT_DEF)
          ConvertImplicitDefToConstZero(Insert, MRI, TII, MF, LIS);

        // We stackified an operand. Add the defining instruction's operands to
        // the worklist stack now to continue to build an ever deeper tree.
        Commuting.Reset();
        TreeWalker.PushOperands(Insert);
      }

      // If we stackified any operands, skip over the tree to start looking for
      // the next instruction we can build a tree on.
      if (Insert != &*MII) {
        ImposeStackOrdering(&*MII);
        MII = MachineBasicBlock::iterator(Insert).getReverse();
        Changed = true;
      }
    }
  }

  // If we used VALUE_STACK anywhere, add it to the live-in sets everywhere so
  // that it never looks like a use-before-def.
  if (Changed) {
    MF.getRegInfo().addLiveIn(WebAssembly::VALUE_STACK);
    for (MachineBasicBlock &MBB : MF)
      MBB.addLiveIn(WebAssembly::VALUE_STACK);
  }

#ifndef NDEBUG
  // Verify that pushes and pops are performed in LIFO order.
  SmallVector<unsigned, 0> Stack;
  for (MachineBasicBlock &MBB : MF) {
    for (MachineInstr &MI : MBB) {
      if (MI.isDebugInstr())
        continue;
      for (MachineOperand &MO : reverse(MI.explicit_operands())) {
        if (!MO.isReg())
          continue;
        unsigned Reg = MO.getReg();

        if (MFI.isVRegStackified(Reg)) {
          if (MO.isDef())
            Stack.push_back(Reg);
          else
            assert(Stack.pop_back_val() == Reg &&
                   "Register stack pop should be paired with a push");
        }
      }
    }
    // TODO: Generalize this code to support keeping values on the stack across
    // basic block boundaries.
    assert(Stack.empty() &&
           "Register stack pushes and pops should be balanced");
  }
#endif

  return Changed;
}