Bug 1027897 - IonMonkey: Refactor split position bookkeeping into helper classes. r=bhackett
author Dan Gohman <sunfish@mozilla.com>
Mon, 23 Jun 2014 13:42:07 -0700
changeset 190337 a965be2731d43676863a57e994d66275f7d0a4fa
parent 190336 c96c8e88e5cf63026d3c8a4c14070d276f582633
child 190338 bcd538f7e0195013ec7dca3b1a02db0a3ac53556
push id 27004
push user emorley@mozilla.com
push date Tue, 24 Jun 2014 15:52:34 +0000
treeherder mozilla-central@7b174d47f3cc
reviewers bhackett
bugs 1027897
milestone 33.0a1
Bug 1027897 - IonMonkey: Refactor split position bookkeeping into helper classes. r=bhackett
js/src/jit/BacktrackingAllocator.cpp
js/src/jit/BacktrackingAllocator.h
--- a/js/src/jit/BacktrackingAllocator.cpp
+++ b/js/src/jit/BacktrackingAllocator.cpp
@@ -8,16 +8,62 @@
 #include "jit/BitSet.h"
 
 using namespace js;
 using namespace js::jit;
 
 using mozilla::DebugOnly;
 
 bool
+SplitPositions::append(CodePosition pos)
+{
+    MOZ_ASSERT(empty() || positions_.back() < pos,
+               "split positions must be sorted");
+    return positions_.append(pos);
+}
+
+bool
+SplitPositions::empty() const
+{
+    return positions_.empty();
+}
+
+SplitPositionsIterator::SplitPositionsIterator(const SplitPositions &splitPositions)
+  : splitPositions_(splitPositions),
+    current_(splitPositions_.positions_.begin())
+{
+    JS_ASSERT(!splitPositions_.empty());
+}
+
+// Proceed to the next split after |pos|.
+void
+SplitPositionsIterator::advancePast(CodePosition pos)
+{
+    JS_ASSERT(!splitPositions_.empty());
+    while (current_ < splitPositions_.positions_.end() && *current_ <= pos)
+        ++current_;
+}
+
+// Test whether |pos| is at or beyond the next split.
+bool
+SplitPositionsIterator::isBeyondNextSplit(CodePosition pos) const
+{
+    JS_ASSERT(!splitPositions_.empty());
+    return current_ < splitPositions_.positions_.end() && pos >= *current_;
+}
+
+// Test whether the end position |pos| is strictly beyond the next split.
+bool
+SplitPositionsIterator::isEndBeyondNextSplit(CodePosition pos) const
+{
+    JS_ASSERT(!splitPositions_.empty());
+    return current_ < splitPositions_.positions_.end() && pos > *current_;
+}
+
+bool
 BacktrackingAllocator::init()
 {
     RegisterSet remainingRegisters(allRegisters_);
     while (!remainingRegisters.empty(/* float = */ false)) {
         AnyRegister reg = AnyRegister(remainingRegisters.takeGeneral());
         registers[reg.code()].allocatable = true;
     }
     while (!remainingRegisters.empty(/* float = */ true)) {
@@ -1488,17 +1534,17 @@ BacktrackingAllocator::trySplitAcrossHot
     }
     if (!coldCode) {
         IonSpew(IonSpew_RegAlloc, "  interval does not contain cold code");
         return true;
     }
 
     IonSpew(IonSpew_RegAlloc, "  split across hot range %s", hotRange->toString());
 
-    SplitPositionVector splitPositions;
+    SplitPositions splitPositions;
     if (!splitPositions.append(hotRange->from) || !splitPositions.append(hotRange->to))
         return false;
     *success = true;
     return splitAt(interval, splitPositions);
 }
 
 bool
 BacktrackingAllocator::trySplitAfterLastRegisterUse(LiveInterval *interval, LiveInterval *conflict, bool *success)
@@ -1536,17 +1582,17 @@ BacktrackingAllocator::trySplitAfterLast
     if (lastRegisterFrom == lastUse) {
         IonSpew(IonSpew_RegAlloc, "  interval's last use is a register use");
         return true;
     }
 
     IonSpew(IonSpew_RegAlloc, "  split after last register use at %u",
             lastRegisterTo.pos());
 
-    SplitPositionVector splitPositions;
+    SplitPositions splitPositions;
     if (!splitPositions.append(lastRegisterTo))
         return false;
     *success = true;
     return splitAt(interval, splitPositions);
 }
 
 bool
 BacktrackingAllocator::splitAtAllRegisterUses(LiveInterval *interval)
@@ -1618,51 +1664,23 @@ BacktrackingAllocator::splitAtAllRegiste
     }
 
     if (spillIntervalIsNew && !newIntervals.append(spillInterval))
         return false;
 
     return split(interval, newIntervals) && requeueIntervals(newIntervals);
 }
 
-// Find the next split position after the current position.
-static size_t NextSplitPosition(size_t activeSplitPosition,
-                                const SplitPositionVector &splitPositions,
-                                CodePosition currentPos)
-{
-    while (activeSplitPosition < splitPositions.length() &&
-           splitPositions[activeSplitPosition] <= currentPos)
-    {
-        ++activeSplitPosition;
-    }
-    return activeSplitPosition;
-}
-
-// Test whether the current position has just crossed a split point.
-static bool SplitHere(size_t activeSplitPosition,
-                      const SplitPositionVector &splitPositions,
-                      CodePosition currentPos)
-{
-    return activeSplitPosition < splitPositions.length() &&
-           currentPos >= splitPositions[activeSplitPosition];
-}
-
 bool
-BacktrackingAllocator::splitAt(LiveInterval *interval,
-                               const SplitPositionVector &splitPositions)
+BacktrackingAllocator::splitAt(LiveInterval *interval, const SplitPositions &splitPositions)
 {
     // Split the interval at the given split points. Unlike splitAtAllRegisterUses,
     // consolidate any register uses which have no intervening split points into the
     // same resulting interval.
 
-    // splitPositions should be non-empty and sorted.
-    JS_ASSERT(!splitPositions.empty());
-    for (size_t i = 1; i < splitPositions.length(); ++i)
-        JS_ASSERT(splitPositions[i-1] < splitPositions[i]);
-
     // Don't spill the interval until after the end of its definition.
     CodePosition spillStart = interval->start();
     if (isRegisterDefinition(interval))
         spillStart = minimalDefEnd(insData[interval->start()].ins()).next();
 
     uint32_t vreg = interval->vreg();
 
     // If this LiveInterval is the result of an earlier split which created a
@@ -1688,35 +1706,35 @@ BacktrackingAllocator::splitAt(LiveInter
     if (spillStart != interval->start()) {
         LiveInterval *newInterval = LiveInterval::New(alloc(), vreg, 0);
         newInterval->setSpillInterval(spillInterval);
         if (!newIntervals.append(newInterval))
             return false;
         lastRegisterUse = interval->start();
     }
 
-    size_t activeSplitPosition = NextSplitPosition(0, splitPositions, interval->start());
+    SplitPositionsIterator splitIter(splitPositions);
+    splitIter.advancePast(interval->start());
+
     for (UsePositionIterator iter(interval->usesBegin()); iter != interval->usesEnd(); iter++) {
         LInstruction *ins = insData[iter->pos].ins();
         if (iter->pos < spillStart) {
             newIntervals.back()->addUseAtEnd(new(alloc()) UsePosition(iter->use, iter->pos));
-            activeSplitPosition = NextSplitPosition(activeSplitPosition, splitPositions, iter->pos);
+            splitIter.advancePast(iter->pos);
         } else if (isRegisterUse(iter->use, ins)) {
             if (lastRegisterUse.pos() == 0 ||
-                SplitHere(activeSplitPosition, splitPositions, iter->pos))
+                splitIter.isBeyondNextSplit(iter->pos))
             {
                 // Place this register use into a different interval from the
                 // last one if there are any split points between the two uses.
                 LiveInterval *newInterval = LiveInterval::New(alloc(), vreg, 0);
                 newInterval->setSpillInterval(spillInterval);
                 if (!newIntervals.append(newInterval))
                     return false;
-                activeSplitPosition = NextSplitPosition(activeSplitPosition,
-                                                        splitPositions,
-                                                        iter->pos);
+                splitIter.advancePast(iter->pos);
             }
             newIntervals.back()->addUseAtEnd(new(alloc()) UsePosition(iter->use, iter->pos));
             lastRegisterUse = iter->pos;
         } else {
             JS_ASSERT(spillIntervalIsNew);
             spillInterval->addUseAtEnd(new(alloc()) UsePosition(iter->use, iter->pos));
         }
     }
@@ -1761,33 +1779,29 @@ BacktrackingAllocator::splitAcrossCalls(
 {
     // Split the interval to separate register uses and non-register uses and
     // allow the vreg to be spilled across its range.
 
     // Find the locations of all calls in the interval's range. Fixed intervals
     // are introduced by buildLivenessInfo only for calls when allocating for
     // the backtracking allocator. fixedIntervalsUnion is sorted backwards, so
     // iterate through it backwards.
-    SplitPositionVector callPositions;
+    SplitPositions callPositions;
+    IonSpewStart(IonSpew_RegAlloc, "  split across calls at ");
     for (size_t i = fixedIntervalsUnion->numRanges(); i > 0; i--) {
         const LiveInterval::Range *range = fixedIntervalsUnion->getRange(i - 1);
         if (interval->covers(range->from) && interval->covers(range->from.previous())) {
+            if (!callPositions.empty())
+                IonSpewCont(IonSpew_RegAlloc, ", ");
+            IonSpewCont(IonSpew_RegAlloc, "%u", range->from.pos());
             if (!callPositions.append(range->from))
                 return false;
         }
     }
-    JS_ASSERT(callPositions.length());
-
-#ifdef DEBUG
-    IonSpewStart(IonSpew_RegAlloc, "  split across calls at ");
-    for (size_t i = 0; i < callPositions.length(); ++i) {
-        IonSpewCont(IonSpew_RegAlloc, "%s%u", i != 0 ? ", " : "", callPositions[i].pos());
-    }
     IonSpewFin(IonSpew_RegAlloc);
-#endif
 
     return splitAt(interval, callPositions);
 }
 
 bool
 BacktrackingAllocator::chooseIntervalSplit(LiveInterval *interval, LiveInterval *conflict)
 {
     bool success = false;
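
(Not part of the patch.) As a hedged illustration of the consolidation rule splitAt implements above, where register uses with no intervening split points stay in the same resulting interval, here is a minimal standalone sketch; plain unsigned ints stand in for CodePosition, and spill intervals and non-register uses are left out:

// Sketch of splitAt's grouping rule: start a new interval whenever a split
// position falls between the previous register use and the current one.
// Positions are plain unsigned ints standing in for jit::CodePosition.
#include <cstdio>
#include <vector>

int main() {
    std::vector<unsigned> uses = {1, 4, 9, 12, 30};   // register use positions
    std::vector<unsigned> splits = {10, 20};          // sorted split positions

    std::vector<std::vector<unsigned>> intervals;
    size_t nextSplit = 0;                             // index of the next unseen split
    for (unsigned use : uses) {
        // If |use| is at or beyond the next split, the previous group ends here
        // (isBeyondNextSplit in the patch).
        bool beyondNextSplit = nextSplit < splits.size() && use >= splits[nextSplit];
        if (intervals.empty() || beyondNextSplit)
            intervals.push_back({});
        // Skip every split at or before |use| (advancePast in the patch).
        while (nextSplit < splits.size() && splits[nextSplit] <= use)
            ++nextSplit;
        intervals.back().push_back(use);
    }

    // Uses 1, 4, and 9 share an interval; the splits at 10 and 20 force new ones.
    for (const std::vector<unsigned> &group : intervals) {
        for (unsigned u : group)
            printf("%u ", u);
        printf("| ");
    }
    printf("\n");   // prints: 1 4 9 | 12 | 30 |
    return 0;
}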
--- a/js/src/jit/BacktrackingAllocator.h
+++ b/js/src/jit/BacktrackingAllocator.h
@@ -99,19 +99,42 @@ class BacktrackingVirtualRegister : publ
     void setGroup(VirtualRegisterGroup *group) {
         group_ = group;
     }
     VirtualRegisterGroup *group() {
         return group_;
     }
 };
 
+class SplitPositionsIterator;
+
 // A sequence of code positions, for telling BacktrackingAllocator::splitAt
 // where to split.
-typedef js::Vector<CodePosition, 4, SystemAllocPolicy> SplitPositionVector;
+class SplitPositions {
+    friend class SplitPositionsIterator;
+
+    js::Vector<CodePosition, 4, SystemAllocPolicy> positions_;
+
+  public:
+    bool append(CodePosition pos);
+    bool empty() const;
+};
+
+// An iterator over the positions in a SplitPositions object.
+class SplitPositionsIterator {
+    const SplitPositions &splitPositions_;
+    const CodePosition *current_;
+
+  public:
+    explicit SplitPositionsIterator(const SplitPositions &splitPositions);
+
+    void advancePast(CodePosition pos);
+    bool isBeyondNextSplit(CodePosition pos) const;
+    bool isEndBeyondNextSplit(CodePosition pos) const;
+};
 
 class BacktrackingAllocator
   : private LiveRangeAllocator<BacktrackingVirtualRegister, /* forLSRA = */ false>
 {
     // Priority queue element: either an interval or group of intervals and the
     // associated priority.
     struct QueueItem
     {
@@ -233,18 +256,17 @@ class BacktrackingAllocator
     size_t computePriority(const LiveInterval *interval);
     size_t computeSpillWeight(const LiveInterval *interval);
 
     size_t computePriority(const VirtualRegisterGroup *group);
     size_t computeSpillWeight(const VirtualRegisterGroup *group);
 
     bool chooseIntervalSplit(LiveInterval *interval, LiveInterval *conflict);
 
-    bool splitAt(LiveInterval *interval,
-                 const SplitPositionVector &splitPositions);
+    bool splitAt(LiveInterval *interval, const SplitPositions &splitPositions);
     bool trySplitAcrossHotcode(LiveInterval *interval, bool *success);
     bool trySplitAfterLastRegisterUse(LiveInterval *interval, LiveInterval *conflict, bool *success);
     bool splitAtAllRegisterUses(LiveInterval *interval);
     bool splitAcrossCalls(LiveInterval *interval);
 };
 
 } // namespace jit
 } // namespace js
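
(Not part of the patch.) For reference, a hedged self-contained sketch of the SplitPositions / SplitPositionsIterator contract introduced above; CodePosition, the container, and the assertions are simplified stand-ins for the real jit types:

// Standalone sketch of the SplitPositions / SplitPositionsIterator contract.
#include <cassert>
#include <vector>

struct CodePosition {
    unsigned bits;
    bool operator<(CodePosition o) const { return bits < o.bits; }
    bool operator<=(CodePosition o) const { return bits <= o.bits; }
    bool operator>(CodePosition o) const { return bits > o.bits; }
    bool operator>=(CodePosition o) const { return bits >= o.bits; }
};

class SplitPositions {
    friend class SplitPositionsIterator;
    std::vector<CodePosition> positions_;

  public:
    bool append(CodePosition pos) {
        // Split positions must be appended in strictly increasing order.
        assert(positions_.empty() || positions_.back() < pos);
        positions_.push_back(pos);
        return true;
    }
    bool empty() const { return positions_.empty(); }
};

class SplitPositionsIterator {
    const SplitPositions &splitPositions_;
    std::vector<CodePosition>::const_iterator current_;

  public:
    explicit SplitPositionsIterator(const SplitPositions &sp)
      : splitPositions_(sp), current_(sp.positions_.begin())
    {
        assert(!sp.empty());
    }

    // Skip every split at or before |pos|.
    void advancePast(CodePosition pos) {
        while (current_ != splitPositions_.positions_.end() && *current_ <= pos)
            ++current_;
    }
    // |pos| is at or beyond the next split.
    bool isBeyondNextSplit(CodePosition pos) const {
        return current_ != splitPositions_.positions_.end() && pos >= *current_;
    }
    // The end position |pos| is strictly beyond the next split.
    bool isEndBeyondNextSplit(CodePosition pos) const {
        return current_ != splitPositions_.positions_.end() && pos > *current_;
    }
};

int main() {
    SplitPositions splits;
    splits.append(CodePosition{10});
    splits.append(CodePosition{20});

    SplitPositionsIterator it(splits);
    it.advancePast(CodePosition{5});                    // both splits still ahead
    assert(!it.isBeyondNextSplit(CodePosition{9}));     // 9 < 10
    assert(it.isBeyondNextSplit(CodePosition{10}));     // 10 >= 10
    assert(!it.isEndBeyondNextSplit(CodePosition{10})); // 10 is not > 10
    it.advancePast(CodePosition{10});                   // next split is now 20
    assert(it.isBeyondNextSplit(CodePosition{25}));
    return 0;
}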