Bug 670628: Eliminate heap-allocated free-until and next-use mappings. r=dvander
author: Andrew Drake <adrake@adrake.org>
Fri, 12 Aug 2011 13:36:23 -0700
changeset 108726 8d3e5e4be9333e5bc4aace2ead98595c4d2b095e
parent 108725 d6c0c813dd80fb12865a8d9874aea6676697e91d
child 108727 1e2078273debc9d4c3bc0df278fe566c634cd789
push id: 2248
push user: akeybl@mozilla.com
push date: Mon, 08 Oct 2012 19:23:44 +0000
treeherder: mozilla-aurora@118a3b748323 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: dvander
bugs: 670628
milestone: 8.0a1
Bug 670628: Eliminate heap-allocated free-until and next-use mappings. r=dvander
js/src/ion/LinearScan.cpp
js/src/ion/LinearScan.h
--- a/js/src/ion/LinearScan.cpp
+++ b/js/src/ion/LinearScan.cpp
@@ -333,19 +333,17 @@ const CodePosition CodePosition::MIN(0);
  * to avoid littering the algorithms with memory management cruft.
  */
 bool
 LinearScanAllocator::createDataStructures()
 {
     allowedRegs = RegisterSet::All();
 
     liveIn = lir->mir()->allocate<BitSet*>(graph.numBlockIds());
-    freeUntilPos = lir->mir()->allocate<CodePosition>(Registers::Total);
-    nextUsePos = lir->mir()->allocate<CodePosition>(Registers::Total);
-    if (!liveIn || !freeUntilPos || !nextUsePos)
+    if (!liveIn)
         return false;
 
     if (!vregs.init(lir->mir(), graph.numVirtualRegisters()))
         return false;
 
     // Build virtual register objects
     for (size_t i = 0; i < graph.numBlocks(); i++) {
         LBlock *block = graph.getBlock(i);
@@ -704,30 +702,30 @@ LinearScanAllocator::allocateRegisters()
             if (!spill())
                 return false;
             continue;
         }
 
         // Try to allocate a free register
         IonSpew(IonSpew_LSRA, " Attempting free register allocation");
 
-        // Find the best register
-        Register best = findBestFreeRegister();
-        IonSpew(IonSpew_LSRA, "  Decided best register was %s, free until %u", best.name(),
-                freeUntilPos[best.code()].pos());
+        CodePosition bestFreeUntil;
+        Register::Code bestCode = findBestFreeRegister(&bestFreeUntil);
+        if (bestCode != Register::Codes::Invalid) {
+            Register best = Register::FromCode(bestCode);
+            IonSpew(IonSpew_LSRA, "  Decided best register was %s", best.name());
 
-        // Attempt to allocate
-        if (freeUntilPos[best.code()] != CodePosition::MIN) {
             // Split when the register is next needed if necessary
-            if (freeUntilPos[best.code()] <= current->end()) {
-                if (!splitInterval(current, freeUntilPos[best.code()]))
+            if (bestFreeUntil <= current->end()) {
+                if (!splitInterval(current, bestFreeUntil))
                     return false;
             }
             if (!assign(LGeneralReg(best)))
                 return false;
+
             continue;
         }
 
         IonSpew(IonSpew_LSRA, "  Unable to allocate free register");
 
         // We may need to allocate a blocked register
         if (!canSpillOthers) {
             IonSpew(IonSpew_LSRA, " Can't spill any other intervals, spilling this one");
@@ -736,24 +734,35 @@ LinearScanAllocator::allocateRegisters()
             continue;
         }
 
         IonSpew(IonSpew_LSRA, " Attempting blocked register allocation");
 
         // If we absolutely need a register or our next use is closer than the
         // selected blocking register then we spill the blocker. Otherwise, we
         // spill the current interval.
-        best = findBestBlockedRegister();
-        if (mustHaveRegister || firstUsePos < nextUsePos[best.code()]) {
+        CodePosition bestNextUsed;
+        bestCode = findBestBlockedRegister(&bestNextUsed);
+        if (bestCode != Register::Codes::Invalid &&
+            (mustHaveRegister || firstUsePos < bestNextUsed))
+        {
+            Register best = Register::FromCode(bestCode);
+            IonSpew(IonSpew_LSRA, "  Decided best register was %s", best.name());
+
             if (!assign(LGeneralReg(best)))
                 return false;
-        } else {
-            if (!spill())
-                return false;
+
+            continue;
         }
+
+        IonSpew(IonSpew_LSRA, "  No registers available to spill");
+        JS_ASSERT(!mustHaveRegister);
+
+        if (!spill())
+            return false;
     }
 
     validateAllocations();
 
     return true;
 }
 
 /*
@@ -1025,128 +1034,133 @@ LinearScanAllocator::finishInterval(Live
     }
 
     handled.pushBack(interval);
 }
 
 /*
  * This function locates a register which may be assigned by the register
  * and is not assigned to any active interval. The register which is free
- * for the longest period of time is then returned. As a side effect,
- * the freeUntilPos array is updated for use elsewhere in the algorithm.
+ * for the longest period of time is then returned.
  */
-Register
-LinearScanAllocator::findBestFreeRegister()
+Register::Code
+LinearScanAllocator::findBestFreeRegister(CodePosition *freeUntil)
 {
-    // Update freeUntilPos for search
     IonSpew(IonSpew_LSRA, "  Computing freeUntilPos");
+
+    // Compute free-until positions for all registers
+    CodePosition freeUntilPos[Registers::Total];
     for (size_t i = 0; i < Registers::Total; i++) {
         if (allowedRegs.has(Register::FromCode(i)))
             freeUntilPos[i] = CodePosition::MAX;
-        else
-            freeUntilPos[i] = CodePosition::MIN;
     }
     for (IntervalIterator i(active.begin()); i != active.end(); i++) {
         if (i->getAllocation()->isGeneralReg()) {
             Register reg = i->getAllocation()->toGeneralReg()->reg();
+            IonSpew(IonSpew_LSRA, "   Register %s not free", reg.name());
             freeUntilPos[reg.code()] = CodePosition::MIN;
-            IonSpew(IonSpew_LSRA, "   Register %s not free", reg.name());
         }
     }
     for (IntervalIterator i(inactive.begin()); i != inactive.end(); i++) {
         if (i->getAllocation()->isGeneralReg()) {
             Register reg = i->getAllocation()->toGeneralReg()->reg();
             CodePosition pos = current->intersect(*i);
             if (pos != CodePosition::MIN && pos < freeUntilPos[reg.code()]) {
                 freeUntilPos[reg.code()] = pos;
                 IonSpew(IonSpew_LSRA, "   Register %s free until %u", reg.name(), pos.pos());
             }
         }
     }
 
-    // Search freeUntilPos for largest value
-    Register best = Register::FromCode(0);
-    for (uint32 i = 0; i < Registers::Total; i++) {
-        Register reg = Register::FromCode(i);
-        if (freeUntilPos[i] > freeUntilPos[best.code()])
-            best = reg;
-    }
-
-    // As an optimization, use the allocation from the previous interval if it
-    // is available.
+    Register::Code bestCode = Registers::Invalid;
     if (current->index()) {
+        // As an optimization, use the allocation from the previous interval if
+        // it is available.
         LiveInterval *previous = current->reg()->getInterval(current->index() - 1);
         if (previous->getAllocation()->isGeneralReg()) {
             Register prevReg = previous->getAllocation()->toGeneralReg()->reg();
             if (freeUntilPos[prevReg.code()] != CodePosition::MIN)
-                best = prevReg;
+                bestCode = prevReg.code();
+        }
+    }
+    if (bestCode == Registers::Invalid && firstUse && firstUse->use->policy() == LUse::FIXED) {
+        // As another optimization, use the upcoming fixed register if it is available.
+        uint32 fixedReg = firstUse->use->registerCode();
+        if (freeUntilPos[fixedReg] != CodePosition::MIN)
+            bestCode = Register::Code(fixedReg);
+    }
+    if (bestCode == Registers::Invalid) {
+        // Search freeUntilPos for largest value
+        for (uint32 i = 0; i < Registers::Total; i++) {
+            if (freeUntilPos[i] == CodePosition::MIN)
+                continue;
+            if (bestCode == Registers::Invalid || freeUntilPos[i] > freeUntilPos[bestCode])
+                bestCode = Register::Code(i);
         }
     }
 
-    // Alternately, use the upcoming fixed register if it is available.
-    if (firstUse && firstUse->use->policy() == LUse::FIXED) {
-        uint32 fixedReg = firstUse->use->registerCode();
-        if (freeUntilPos[fixedReg] != CodePosition::MIN)
-            best = Register::FromCode(fixedReg);
-    }
-
-    return best;
+    if (bestCode != Registers::Invalid)
+        *freeUntil = freeUntilPos[bestCode];
+    return bestCode;
 }
 
 /*
  * This function locates a register which is assigned to an active interval,
- * and returns the one with the furthest away next use. As a side effect,
- * the nextUsePos array is updated with the next use position of all active
- * intervals for use elsewhere in the algorithm.
+ * and returns the one with the furthest away next use. The next-use
+ * position of the chosen register is reported through the nextUsed
+ * outparam.
  */
-Register
-LinearScanAllocator::findBestBlockedRegister()
+Register::Code
+LinearScanAllocator::findBestBlockedRegister(CodePosition *nextUsed)
 {
-    // Update nextUsePos for search
     IonSpew(IonSpew_LSRA, "  Computing nextUsePos");
+
+    // Compute next-used positions for all registers
+    CodePosition nextUsePos[Registers::Total];
     for (size_t i = 0; i < Registers::Total; i++) {
         if (allowedRegs.has(Register::FromCode(i)))
             nextUsePos[i] = CodePosition::MAX;
-        else
-            nextUsePos[i] = CodePosition::MIN;
     }
     for (IntervalIterator i(active.begin()); i != active.end(); i++) {
         if (i->getAllocation()->isGeneralReg()) {
             Register reg = i->getAllocation()->toGeneralReg()->reg();
-            CodePosition nextUse = i->reg()->nextUsePosAfter(current->start());
-            IonSpew(IonSpew_LSRA, "   Register %s next used %u", reg.name(), nextUse.pos());
-
             if (i->start() == current->start()) {
-                IonSpew(IonSpew_LSRA, "    Disqualifying due to recency.");
-                nextUsePos[reg.code()] = current->start();
+                nextUsePos[reg.code()] = CodePosition::MIN;
+                IonSpew(IonSpew_LSRA, "   Disqualifying %s due to recency", reg.name());
             } else {
-                nextUsePos[reg.code()] = nextUse;
+                nextUsePos[reg.code()] = i->reg()->nextUsePosAfter(current->start());
+                IonSpew(IonSpew_LSRA, "   Register %s next used %u", reg.name(),
+                        nextUsePos[reg.code()].pos());
             }
         }
     }
     for (IntervalIterator i(inactive.begin()); i != inactive.end(); i++) {
         if (i->getAllocation()->isGeneralReg()) {
             Register reg = i->getAllocation()->toGeneralReg()->reg();
             CodePosition pos = i->reg()->nextUsePosAfter(current->start());
-            JS_ASSERT(i->covers(pos) || i->end() == pos);
+            JS_ASSERT(i->covers(pos));
             if (pos < nextUsePos[reg.code()]) {
                 nextUsePos[reg.code()] = pos;
                 IonSpew(IonSpew_LSRA, "   Register %s next used %u", reg.name(), pos.pos());
             }
         }
     }
 
     // Search nextUsePos for largest value
-    Register best = Register::FromCode(0);
+    Register::Code bestCode = Register::Codes::Invalid;
     for (size_t i = 0; i < Registers::Total; i++) {
-        if (nextUsePos[i] > nextUsePos[best.code()])
-            best = Register::FromCode(i);
+        if (nextUsePos[i] == CodePosition::MIN)
+            continue;
+        if (bestCode == Register::Codes::Invalid || nextUsePos[i] > nextUsePos[bestCode])
+            bestCode = Register::Code(i);
     }
-    IonSpew(IonSpew_LSRA, "  Decided best register was %s", best.name());
-    return best;
+
+    if (bestCode != Register::Codes::Invalid)
+        *nextUsed = nextUsePos[bestCode];
+    return bestCode;
 }
 
 /*
  * Two intervals can coexist if any of the following conditions is met:
  *
  *   - The intervals do not intersect.
  *   - The intervals have different allocations.
  *   - The intervals have the same allocation, but the allocation may be
--- a/js/src/ion/LinearScan.h
+++ b/js/src/ion/LinearScan.h
@@ -110,16 +110,19 @@ class CodePosition
      * This is the half of the instruction this code position represents, as
      * described in the huge comment above.
      */
     enum SubPosition {
         INPUT,
         OUTPUT
     };
 
+    CodePosition() : bits_(0)
+    { }
+
     CodePosition(uint32 instruction, SubPosition where) {
         JS_ASSERT(instruction < 0x80000000u);
         JS_ASSERT(((uint32)where & SUBPOSITION_MASK) == (uint32)where);
         bits_ = (instruction << INSTRUCTION_SHIFT) | (uint32)where;
     }
 
     uint32 ins() const {
         return bits_ >> INSTRUCTION_SHIFT;
@@ -459,34 +462,32 @@ class LinearScanAllocator
     StackAssignment stackAssignment;
 
     // Run-time state
     RegisterSet allowedRegs;
     UnhandledQueue unhandled;
     InlineList<LiveInterval> active;
     InlineList<LiveInterval> inactive;
     InlineList<LiveInterval> handled;
-    CodePosition *freeUntilPos;
-    CodePosition *nextUsePos;
     LiveInterval *current;
     LOperand *firstUse;
     CodePosition firstUsePos;
 
     bool createDataStructures();
     bool buildLivenessInfo();
     bool allocateRegisters();
     bool resolveControlFlow();
     bool reifyAllocations();
 
     bool splitInterval(LiveInterval *interval, CodePosition pos);
     bool assign(LAllocation allocation);
     bool spill();
     void finishInterval(LiveInterval *interval);
-    Register findBestFreeRegister();
-    Register findBestBlockedRegister();
+    Register::Code findBestFreeRegister(CodePosition *freeUntil);
+    Register::Code findBestBlockedRegister(CodePosition *nextUsed);
     bool canCoexist(LiveInterval *a, LiveInterval *b);
     LMoveGroup *getMoveGroupBefore(CodePosition pos);
     bool moveBefore(CodePosition pos, LiveInterval *from, LiveInterval *to);
     void setIntervalPriority(LiveInterval *interval);
 
 #ifdef DEBUG
     void validateIntervals();
     void validateAllocations();