Bug 1385165 - Calculate spill weight of a range's uses when adding to or removing from it. r=bhackett
author Ting-Yu Chou <janus926@gmail.com>
Mon, 14 Aug 2017 15:24:58 +0800
changeset 374987 df79199f9f0fcde01ddc2eccf2a5368cb7b801aa
parent 374986 1be0e59789420226a67613b6ef7b4e4a39b4bd48
child 374988 c7f8cb4848071a5738ccb92af50f81fa15b917ac
push id 32343
push user cbook@mozilla.com
push date Wed, 16 Aug 2017 09:20:56 +0000
reviewers bhackett
bugs 1385165
milestone 57.0a1
Bug 1385165 - Calculate spill weight of a range's uses when adding to or removing from it. r=bhackett

Iterating a LiveRange's uses in BacktrackingAllocator::computeSpillWeight() to read each UsePosition's policy isn't cache friendly, because uses_ is a linked list and each node is allocated separately. Changing the data structure to a vector would be worse because of the overhead from frequent insertions and removals. However, a use's policy can't be changed after initialization, so we can pre-calculate the spill weight whenever a use is added to or removed from a LiveRange.

MozReview-Commit-ID: BJEvI7KBVAJ
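To make the bookkeeping concrete, here is a minimal standalone sketch of the same pattern: maintain a running spill-weight total and a fixed-use counter as uses are added and removed, so the weight query becomes O(1) instead of a walk over the use list. This is not the SpiderMonkey code itself; SimpleRange, Use, UsePolicy and SpillWeightFromPolicy are hypothetical stand-ins for LiveRange, UsePosition, LUse::Policy and BacktrackingAllocator::SpillWeightFromUsePolicy.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <forward_list>

    enum class UsePolicy { Any, Register, Fixed, KeepAlive };

    // Same weights as computeSpillWeight() assigns per use policy.
    static size_t SpillWeightFromPolicy(UsePolicy policy) {
        switch (policy) {
          case UsePolicy::Any:
            return 1000;
          case UsePolicy::Register:
          case UsePolicy::Fixed:
            return 2000;
          default:
            return 0;
        }
    }

    struct Use {
        UsePolicy policy;
    };

    class SimpleRange {
        std::forward_list<Use> uses_;
        size_t usesSpillWeight_ = 0;   // running total, updated on add/remove
        uint32_t numFixedUses_ = 0;    // count of UsePolicy::Fixed uses

        void noteAddedUse(const Use& use) {
            usesSpillWeight_ += SpillWeightFromPolicy(use.policy);
            if (use.policy == UsePolicy::Fixed)
                ++numFixedUses_;
        }
        void noteRemovedUse(const Use& use) {
            usesSpillWeight_ -= SpillWeightFromPolicy(use.policy);
            if (use.policy == UsePolicy::Fixed)
                --numFixedUses_;
        }

      public:
        void addUse(const Use& use) {
            uses_.push_front(use);
            noteAddedUse(use);
        }
        bool popUse() {
            if (uses_.empty())
                return false;
            noteRemovedUse(uses_.front());
            uses_.pop_front();
            return true;
        }
        // O(1) queries; no iteration over the use list.
        size_t usesSpillWeight() const { return usesSpillWeight_; }
        uint32_t numFixedUses() const { return numFixedUses_; }
    };

    int main() {
        SimpleRange range;
        range.addUse({UsePolicy::Any});    // +1000
        range.addUse({UsePolicy::Fixed});  // +2000, one fixed use
        assert(range.usesSpillWeight() == 3000);
        assert(range.numFixedUses() == 1);
        range.popUse();                    // removes the Fixed use (front)
        assert(range.usesSpillWeight() == 1000);
        assert(range.numFixedUses() == 0);
        return 0;
    }

The key design point mirrored from the patch is that a use's policy is immutable after initialization, so the aggregate stays exact with only an addition at the add site and a subtraction at the remove site.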
js/src/jit/BacktrackingAllocator.cpp
js/src/jit/BacktrackingAllocator.h
--- a/js/src/jit/BacktrackingAllocator.cpp
+++ b/js/src/jit/BacktrackingAllocator.cpp
@@ -66,34 +66,63 @@ InsertSortedList(InlineForwardList<T> &l
     else
         list.pushFront(value);
 }
 
 /////////////////////////////////////////////////////////////////////
 // LiveRange
 /////////////////////////////////////////////////////////////////////
 
+inline void
+LiveRange::noteAddedUse(UsePosition* use)
+{
+    LUse::Policy policy = use->usePolicy();
+    usesSpillWeight_ += BacktrackingAllocator::SpillWeightFromUsePolicy(policy);
+    if (policy == LUse::FIXED)
+        ++numFixedUses_;
+}
+
+inline void
+LiveRange::noteRemovedUse(UsePosition* use)
+{
+    LUse::Policy policy = use->usePolicy();
+    usesSpillWeight_ -= BacktrackingAllocator::SpillWeightFromUsePolicy(policy);
+    if (policy == LUse::FIXED)
+        --numFixedUses_;
+    MOZ_ASSERT_IF(!hasUses(), !usesSpillWeight_ && !numFixedUses_);
+}
+
 void
 LiveRange::addUse(UsePosition* use)
 {
     MOZ_ASSERT(covers(use->pos));
     InsertSortedList(uses_, use);
+    noteAddedUse(use);
+}
+
+UsePosition*
+LiveRange::popUse()
+{
+    UsePosition* ret = uses_.popFront();
+    noteRemovedUse(ret);
+    return ret;
 }
 
 void
 LiveRange::distributeUses(LiveRange* other)
 {
     MOZ_ASSERT(other->vreg() == vreg());
     MOZ_ASSERT(this != other);
 
     // Move over all uses which fit in |other|'s boundaries.
     for (UsePositionIterator iter = usesBegin(); iter; ) {
         UsePosition* use = *iter;
         if (other->covers(use->pos)) {
             uses_.removeAndIncrement(iter);
+            noteRemovedUse(use);
             other->addUse(use);
         } else {
             iter++;
         }
     }
 
     // Distribute the definition to |other| as well, if possible.
     if (hasDefinition() && from() == other->from())
@@ -2541,37 +2570,19 @@ BacktrackingAllocator::computeSpillWeigh
             if (reg.def()->policy() == LDefinition::FIXED && reg.def()->output()->isRegister()) {
                 usesTotal += 2000;
                 fixed = true;
             } else if (!reg.ins()->isPhi()) {
                 usesTotal += 2000;
             }
         }
 
-        for (UsePositionIterator iter = range->usesBegin(); iter; iter++) {
-            switch (iter->usePolicy()) {
-              case LUse::ANY:
-                usesTotal += 1000;
-                break;
-
-              case LUse::FIXED:
-                fixed = true;
-                MOZ_FALLTHROUGH;
-              case LUse::REGISTER:
-                usesTotal += 2000;
-                break;
-
-              case LUse::KEEPALIVE:
-                break;
-
-              default:
-                // Note: RECOVERED_INPUT will not appear in UsePositionIterator.
-                MOZ_CRASH("Bad use");
-            }
-        }
+        usesTotal += range->usesSpillWeight();
+        if (range->numFixedUses() > 0)
+            fixed = true;
     }
 
     // Bundles with fixed uses are given a higher spill weight, since they must
     // be allocated to a specific register.
     if (testbed && fixed)
         usesTotal *= 2;
 
     // Compute spill weight as a use density, lowering the weight for long
--- a/js/src/jit/BacktrackingAllocator.h
+++ b/js/src/jit/BacktrackingAllocator.h
@@ -254,25 +254,40 @@ class LiveRange : public TempObject
     LiveBundle* bundle_;
 
     // The code positions in this range.
     Range range_;
 
     // All uses of the virtual register in this range, ordered by location.
     InlineForwardList<UsePosition> uses_;
 
+    // Total spill weight calculated from all the uses' policies. Because a
+    // use's policy can't be changed after initialization, we can update the
+    // weight whenever a use is added to or removed from this range. This way,
+    // we don't need to iterate over all the uses every time
+    // computeSpillWeight() is called.
+    size_t usesSpillWeight_;
+
+    // Number of uses that have policy LUse::FIXED.
+    uint32_t numFixedUses_;
+
     // Whether this range contains the virtual register's definition.
     bool hasDefinition_;
 
     LiveRange(uint32_t vreg, Range range)
-      : vreg_(vreg), bundle_(nullptr), range_(range), hasDefinition_(false)
+      : vreg_(vreg), bundle_(nullptr), range_(range), usesSpillWeight_(0),
+        numFixedUses_(0), hasDefinition_(false)
+
     {
         MOZ_ASSERT(!range.empty());
     }
 
+    void noteAddedUse(UsePosition* use);
+    void noteRemovedUse(UsePosition* use);
+
   public:
     static LiveRange* FallibleNew(TempAllocator& alloc, uint32_t vreg,
                                   CodePosition from, CodePosition to)
     {
         return new(alloc.fallible()) LiveRange(vreg, Range(from, to));
     }
 
     uint32_t vreg() const {
@@ -311,19 +326,17 @@ class LiveRange : public TempObject
         return uses_.begin();
     }
     UsePosition* lastUse() const {
         return uses_.back();
     }
     bool hasUses() const {
         return !!usesBegin();
     }
-    UsePosition* popUse() {
-        return uses_.popFront();
-    }
+    UsePosition* popUse();
 
     bool hasDefinition() const {
         return hasDefinition_;
     }
 
     void setFrom(CodePosition from) {
         range_.from = from;
         MOZ_ASSERT(!range_.empty());
@@ -340,16 +353,23 @@ class LiveRange : public TempObject
     void addUse(UsePosition* use);
     void distributeUses(LiveRange* other);
 
     void setHasDefinition() {
         MOZ_ASSERT(!hasDefinition_);
         hasDefinition_ = true;
     }
 
+    size_t usesSpillWeight() const {
+        return usesSpillWeight_;
+    }
+    uint32_t numFixedUses() const {
+        return numFixedUses_;
+    }
+
 #ifdef JS_JITSPEW
     // Return a string describing this range.
     UniqueChars toString() const;
 #endif
 
     // Comparator for use in range splay trees.
     static int compare(LiveRange* v0, LiveRange* v1) {
         // LiveRange includes 'from' but excludes 'to'.
@@ -679,16 +699,30 @@ class BacktrackingAllocator : protected 
       : RegisterAllocator(mir, lir, graph),
         testbed(testbed),
         liveIn(nullptr),
         callRanges(nullptr)
     { }
 
     MOZ_MUST_USE bool go();
 
+    static size_t SpillWeightFromUsePolicy(LUse::Policy policy) {
+        switch (policy) {
+          case LUse::ANY:
+            return 1000;
+
+          case LUse::REGISTER:
+          case LUse::FIXED:
+            return 2000;
+
+          default:
+            return 0;
+        }
+    }
+
   private:
 
     typedef Vector<LiveRange*, 4, SystemAllocPolicy> LiveRangeVector;
     typedef Vector<LiveBundle*, 4, SystemAllocPolicy> LiveBundleVector;
 
     // Liveness methods.
     MOZ_MUST_USE bool init();
     MOZ_MUST_USE bool buildLivenessInfo();