Bug 670632: Add look-ahead and look-behind register hinting to linear scan register allocator. r=dvander
authorAndrew Drake <adrake@adrake.org>
Tue, 12 Jul 2011 16:44:24 -0700
changeset 73503 0d79db47e6fa0e148614aba0dc24a37d3339ce3f
parent 73502 6aba51ebc1b9c3670fcbaf18d81ce9938619f316
child 73504 7bd4ed0a22856f5c1d8115b265dc1d60293d6c53
push id 34
push user drakedevel@gmail.com
push date Thu, 28 Jul 2011 00:46:46 +0000
reviewers dvander
bugs 670632
milestone 8.0a1
Bug 670632: Add look-ahead and look-behind register hinting to linear scan register allocator. r=dvander
js/src/ion/LinearScan.cpp
js/src/ion/LinearScan.h
--- a/js/src/ion/LinearScan.cpp
+++ b/js/src/ion/LinearScan.cpp
@@ -209,32 +209,44 @@ VirtualRegister::intervalFor(CodePositio
 
 LiveInterval *
 VirtualRegister::getFirstInterval()
 {
     JS_ASSERT(!intervals_.empty());
     return intervals_[0];
 }
 
+LOperand *
+VirtualRegister::nextUseAfter(CodePosition after)
+{
+    LOperand *min = NULL;
+    CodePosition minPos = CodePosition::MAX;
+    for (LOperand *i = uses_.begin(); i != uses_.end(); i++) {
+        CodePosition ip(i->ins->id(), CodePosition::INPUT);
+        if (i->use->policy() != LUse::KEEPALIVE && ip >= after && ip < minPos) {
+            min = i;
+            minPos = ip;
+        }
+    }
+    return min;
+}
+
 /*
  * This function locates the first "real" use of the callee virtual register
  * that follows the given code position. Non-"real" uses are currently just
  * snapshots, which keep virtual registers alive but do not result in the
  * generation of code that use them.
  */
 CodePosition
-VirtualRegister::nextUseAfter(CodePosition after)
+VirtualRegister::nextUsePosAfter(CodePosition after)
 {
-    CodePosition min = CodePosition::MAX;
-    for (LOperand *i = uses_.begin(); i != uses_.end(); i++) {
-        CodePosition ip(i->ins->id(), CodePosition::INPUT);
-        if (i->use->policy() != LUse::KEEPALIVE && ip >= after && ip < min)
-            min = ip;
-    }
-    return min;
+    LOperand *min = nextUseAfter(after);
+    if (min)
+        return CodePosition(min->ins->id(), CodePosition::INPUT);
+    return CodePosition::MAX;
 }
 
 /*
  * This function finds the position of the next use of the callee virtual
  * register that is incompatible with the provideded allocation. For example,
  * a use with a REGISTER policy would be incompatible with a stack slot
  * allocation.
  */
@@ -547,17 +559,17 @@ LinearScanAllocator::allocateRegisters()
                 if (!assign(*inputInterval->getAllocation()))
                     return false;
                 continue;
             }
             if (policy == LDefinition::DEFAULT)
                 mustHaveRegister = !current->reg()->ins()->isPhi();
             else
                 JS_ASSERT(policy == LDefinition::REDEFINED);
-        } else {
+        } else if (position.subpos() == CodePosition::INPUT) {
             // Scan uses for any at the current instruction
             LOperand *fixedOp = NULL;
             for (size_t i = 0; i < current->reg()->numUses(); i++) {
                 LOperand *op = current->reg()->getUse(i);
                 if (op->ins->id() == position.ins()) {
                     LUse::Policy pol = op->use->policy();
                     if (pol == LUse::FIXED) {
                         IonSpew(IonSpew_LSRA, " Use has fixed policy.");
@@ -576,16 +588,23 @@ LinearScanAllocator::allocateRegisters()
             if (fixedOp) {
                 current->setFlag(LiveInterval::FIXED);
                 if (!assign(LGeneralReg(Register::FromCode(fixedOp->use->registerCode()))))
                     return false;
                 continue;
             }
         }
 
+        // Find the first use of this register
+        firstUse = current->reg()->nextUseAfter(position);
+        if (firstUse)
+            firstUsePos = inputOf(firstUse->ins);
+        else
+            firstUsePos = CodePosition::MAX;
+
         // Try to allocate a free register
         IonSpew(IonSpew_LSRA, " Attempting free register allocation");
 
         // Find the best register
         Register best = findBestFreeRegister();
         IonSpew(IonSpew_LSRA, "  Decided best register was %u, free until %u", best.code(),
                 freeUntilPos[best.code()].pos());
 
@@ -604,28 +623,27 @@ LinearScanAllocator::allocateRegisters()
         // We need to allocate a blocked register
         IonSpew(IonSpew_LSRA, "  Unable to allocate free register");
         IonSpew(IonSpew_LSRA, " Attempting blocked register allocation");
 
         // Find the best register
         best = findBestBlockedRegister();
 
         // Figure out whether to spill the blocker or ourselves
-        CodePosition firstUse = current->reg()->nextUseAfter(position);
-        IonSpew(IonSpew_LSRA, "  Current interval next used at %u", firstUse.pos());
+        IonSpew(IonSpew_LSRA, "  Current interval next used at %u", firstUsePos.pos());
 
         // Deal with constraints
         if (mustHaveRegister) {
             IonSpew(IonSpew_LSRA, "   But this interval needs a register.");
-            firstUse = position;
+            firstUsePos = position;
         }
 
         // We only spill the blocking interval if it is a strict improvement
         // to do so, otherwise we will continuously re-spill intervals.
-        if (firstUse >= nextUsePos[best.code()]) {
+        if (firstUsePos >= nextUsePos[best.code()]) {
             if (!spill())
                 return false;
         } else {
             if (!assign(LGeneralReg(best)))
                 return false;
         }
     }
 
@@ -1027,16 +1045,34 @@ LinearScanAllocator::findBestFreeRegiste
     Register best = Register::FromCode(0);
     for (uint32 i = 0; i < Registers::Total; i++) {
         Register reg = Register::FromCode(i);
         freeRegs.add(reg);
         if (freeUntilPos[i] > freeUntilPos[best.code()])
             best = reg;
     }
 
+    // As an optimization, use the allocation from the previous interval if it
+    // is available.
+    if (current->index()) {
+        LiveInterval *previous = current->reg()->getInterval(current->index() - 1);
+        if (previous->getAllocation()->isGeneralReg()) {
+            Register prevReg = previous->getAllocation()->toGeneralReg()->reg();
+            if (freeUntilPos[prevReg.code()] != CodePosition::MIN)
+                best = prevReg;
+        }
+    }
+
+    // Alternatively, use the upcoming fixed register if it is available.
+    if (firstUse && firstUse->use->policy() == LUse::FIXED) {
+        uint32 fixedReg = firstUse->use->registerCode();
+        if (freeUntilPos[fixedReg] != CodePosition::MIN)
+            best = Register::FromCode(fixedReg);
+    }
+
     return best;
 }
 
 /*
  * This function locates a register which is assigned to an active interval,
  * and returns the one with the furthest away next use. As a side effect,
  * the nextUsePos array is updated with the next use position of all active
  * intervals for use elsewhere in the algorithm.
@@ -1050,31 +1086,31 @@ LinearScanAllocator::findBestBlockedRegi
         if (allowedRegs.has(Register::FromCode(i)))
             nextUsePos[i] = CodePosition::MAX;
         else
             nextUsePos[i] = CodePosition::MIN;
     }
     for (IntervalIterator i(active.begin()); i != active.end(); i++) {
         if (i->getAllocation()->isGeneralReg()) {
             Register reg = i->getAllocation()->toGeneralReg()->reg();
-            CodePosition nextUse = i->reg()->nextUseAfter(current->start());
+            CodePosition nextUse = i->reg()->nextUsePosAfter(current->start());
             IonSpew(IonSpew_LSRA, "   Register %u next used %u", reg.code(), nextUse.pos());
 
             if (i->start() == current->start()) {
                 IonSpew(IonSpew_LSRA, "    Disqualifying due to recency.");
                 nextUsePos[reg.code()] = current->start();
             } else {
                 nextUsePos[reg.code()] = nextUse;
             }
         }
     }
     for (IntervalIterator i(inactive.begin()); i != inactive.end(); i++) {
         if (i->getAllocation()->isGeneralReg()) {
             Register reg = i->getAllocation()->toGeneralReg()->reg();
-            CodePosition pos = i->reg()->nextUseAfter(current->start());
+            CodePosition pos = i->reg()->nextUsePosAfter(current->start());
             JS_ASSERT(i->covers(pos) || i->end() == pos);
             if (pos < nextUsePos[reg.code()]) {
                 nextUsePos[reg.code()] = pos;
                 IonSpew(IonSpew_LSRA, "   Register %u next used %u", reg.code(), pos.pos());
             }
         }
     }
 
@@ -1146,16 +1182,18 @@ LinearScanAllocator::getMoveGroupBefore(
         return NULL;
     }
 }
 
 bool
 LinearScanAllocator::moveBefore(CodePosition pos, LiveInterval *from, LiveInterval *to)
 {
     LMoveGroup *moves = getMoveGroupBefore(pos);
+    if (*from->getAllocation() == *to->getAllocation())
+        return true;
     return moves->add(from->getAllocation(), to->getAllocation());
 }
 
 #ifdef DEBUG
 /*
  * Ensure intervals appear in exactly the appropriate one of {active,inactive,
  * handled}, and that active and inactive intervals do not conflict. Handled
  * intervals are checked for conflicts in validateAllocations for performance
--- a/js/src/ion/LinearScan.h
+++ b/js/src/ion/LinearScan.h
@@ -340,17 +340,18 @@ class VirtualRegister : public TempObjec
     void setOutputMoves(LMoveGroup *moves) {
         outputMoves_ = moves;
     }
     LMoveGroup *outputMoves() {
         return outputMoves_;
     }
 
     LiveInterval *intervalFor(CodePosition pos);
-    CodePosition nextUseAfter(CodePosition pos);
+    LOperand *nextUseAfter(CodePosition pos);
+    CodePosition nextUsePosAfter(CodePosition pos);
     CodePosition nextIncompatibleUseAfter(CodePosition after, LAllocation alloc);
     LiveInterval *getFirstInterval();
 };
 
 class VirtualRegisterMap
 {
   private:
     VirtualRegister *vregs_;
@@ -428,16 +429,18 @@ class LinearScanAllocator
     UnhandledQueue unhandled;
     InlineList<LiveInterval> active;
     InlineList<LiveInterval> inactive;
     InlineList<LiveInterval> handled;
     RegisterSet freeRegs;
     CodePosition *freeUntilPos;
     CodePosition *nextUsePos;
     LiveInterval *current;
+    LOperand *firstUse;
+    CodePosition firstUsePos;
 
     bool createDataStructures();
     bool buildLivenessInfo();
     bool allocateRegisters();
     bool resolveControlFlow();
     bool reifyAllocations();
 
     bool splitInterval(LiveInterval *interval, CodePosition pos);
@@ -469,17 +472,18 @@ class LinearScanAllocator
     }
     CodePosition inputOf(LInstruction *ins) {
         return CodePosition(ins->id(), CodePosition::INPUT);
     }
 
   public:
     LinearScanAllocator(LIRGenerator *lir, LIRGraph &graph)
       : lir(lir),
-        graph(graph)
+        graph(graph),
+        firstUsePos(CodePosition::MAX)
     { }
 
     bool go();
 
 };
 
 }
 }