Bug 937540 part 1 - Add |operator new| for placement new to TempObject and use for regalloc objects. r=luke
author: Jan de Mooij <jdemooij@mozilla.com>
Sat, 16 Nov 2013 10:14:07 +0100
changeset 154952 cde2604ee22bc22358e338efee9a5cdcac060395
parent 154951 362b06bd93633cb80cf0fec568285840238f58a2
child 154953 d06ce084e3a98cb4c94c291317897801688e06ba
push id: 25657
push user: Ms2ger@gmail.com
push date: Sun, 17 Nov 2013 13:24:50 +0000
treeherder: mozilla-central@0e88f511e067 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: luke
bugs: 937540
milestone: 28.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 937540 part 1 - Add |operator new| for placement new to TempObject and use for regalloc objects. r=luke
js/src/jit/BacktrackingAllocator.cpp
js/src/jit/IonAllocPolicy.h
js/src/jit/LinearScan.cpp
js/src/jit/LiveRangeAllocator.cpp
js/src/jit/LiveRangeAllocator.h
js/src/jit/RegisterAllocator.h
--- a/js/src/jit/BacktrackingAllocator.cpp
+++ b/js/src/jit/BacktrackingAllocator.cpp
@@ -21,37 +21,37 @@ BacktrackingAllocator::init()
         AnyRegister reg = AnyRegister(remainingRegisters.takeGeneral());
         registers[reg.code()].allocatable = true;
     }
     while (!remainingRegisters.empty(/* float = */ true)) {
         AnyRegister reg = AnyRegister(remainingRegisters.takeFloat());
         registers[reg.code()].allocatable = true;
     }
 
-    LifoAlloc *alloc = mir->temp().lifoAlloc();
+    LifoAlloc *lifoAlloc = mir->temp().lifoAlloc();
     for (size_t i = 0; i < AnyRegister::Total; i++) {
         registers[i].reg = AnyRegister::FromCode(i);
-        registers[i].allocations.setAllocator(alloc);
+        registers[i].allocations.setAllocator(lifoAlloc);
 
         LiveInterval *fixed = fixedIntervals[i];
         for (size_t j = 0; j < fixed->numRanges(); j++) {
             AllocatedRange range(fixed, fixed->getRange(j));
             if (!registers[i].allocations.insert(range))
                 return false;
         }
     }
 
-    hotcode.setAllocator(alloc);
+    hotcode.setAllocator(lifoAlloc);
 
     // Partition the graph into hot and cold sections, for helping to make
     // splitting decisions. Since we don't have any profiling data this is a
     // crapshoot, so just mark the bodies of inner loops as hot and everything
     // else as cold.
 
-    LiveInterval *hotcodeInterval = new LiveInterval(0);
+    LiveInterval *hotcodeInterval = new(alloc()) LiveInterval(0);
 
     LBlock *backedge = nullptr;
     for (size_t i = 0; i < graph.numBlocks(); i++) {
         LBlock *block = graph.getBlock(i);
 
         // If we see a loop header, mark the backedge so we know when we have
         // hit the end of the loop. Don't process the loop immediately, so that
         // if there is an inner loop we will ignore the outer backedge.
@@ -222,17 +222,17 @@ BacktrackingAllocator::tryGroupRegisters
             return false;
         reg1->setGroup(group0);
         return true;
     }
 
     if (LifetimesOverlap(reg0, reg1))
         return true;
 
-    VirtualRegisterGroup *group = new VirtualRegisterGroup();
+    VirtualRegisterGroup *group = new(alloc()) VirtualRegisterGroup();
     if (!group->registers.append(vreg0) || !group->registers.append(vreg1))
         return false;
 
     reg0->setGroup(group);
     reg1->setGroup(group);
     return true;
 }
 
@@ -294,27 +294,27 @@ BacktrackingAllocator::tryGroupReusedReg
             return true;
         }
         if (use->policy() != LUse::ANY && use->policy() != LUse::KEEPALIVE) {
             reg.setMustCopyInput();
             return true;
         }
     }
 
-    LiveInterval *preInterval = new LiveInterval(interval->vreg(), 0);
+    LiveInterval *preInterval = new(alloc()) LiveInterval(interval->vreg(), 0);
     for (size_t i = 0; i < interval->numRanges(); i++) {
         const LiveInterval::Range *range = interval->getRange(i);
         JS_ASSERT(range->from <= inputOf(reg.ins()));
 
         CodePosition to = (range->to <= outputOf(reg.ins())) ? range->to : outputOf(reg.ins());
         if (!preInterval->addRange(range->from, to))
             return false;
     }
 
-    LiveInterval *postInterval = new LiveInterval(interval->vreg(), 0);
+    LiveInterval *postInterval = new(alloc()) LiveInterval(interval->vreg(), 0);
     if (!postInterval->addRange(inputOf(reg.ins()), interval->end()))
         return false;
 
     LiveIntervalVector newIntervals;
     if (!newIntervals.append(preInterval) || !newIntervals.append(postInterval))
         return false;
 
     distributeUses(interval, newIntervals);
@@ -779,17 +779,17 @@ BacktrackingAllocator::distributeUses(Li
         LiveInterval *addInterval = nullptr;
         for (size_t i = 0; i < newIntervals.length(); i++) {
             LiveInterval *newInterval = newIntervals[i];
             if (newInterval->covers(pos)) {
                 if (!addInterval || newInterval->start() < addInterval->start())
                     addInterval = newInterval;
             }
         }
-        addInterval->addUse(new UsePosition(iter->use, iter->pos));
+        addInterval->addUse(new(alloc()) UsePosition(iter->use, iter->pos));
     }
 }
 
 bool
 BacktrackingAllocator::split(LiveInterval *interval,
                              const LiveIntervalVector &newIntervals)
 {
     if (IonSpewEnabled(IonSpew_RegAlloc)) {
@@ -1317,17 +1317,17 @@ BacktrackingAllocator::dumpAllocations()
 #endif // DEBUG
 }
 
 bool
 BacktrackingAllocator::addLiveInterval(LiveIntervalVector &intervals, uint32_t vreg,
                                        LiveInterval *spillInterval,
                                        CodePosition from, CodePosition to)
 {
-    LiveInterval *interval = new LiveInterval(vreg, 0);
+    LiveInterval *interval = new(alloc()) LiveInterval(vreg, 0);
     interval->setSpillInterval(spillInterval);
     return interval->addRange(from, to) && intervals.append(interval);
 }
 
 ///////////////////////////////////////////////////////////////////////////////
 // Heuristic Methods
 ///////////////////////////////////////////////////////////////////////////////
 
@@ -1506,40 +1506,40 @@ BacktrackingAllocator::trySplitAcrossHot
         if (!hotRange->contains(interval->getRange(i))) {
             coldCode = true;
             break;
         }
     }
     if (!coldCode)
         return true;
 
-    LiveInterval *hotInterval = new LiveInterval(interval->vreg(), 0);
+    LiveInterval *hotInterval = new(alloc()) LiveInterval(interval->vreg(), 0);
     LiveInterval *preInterval = nullptr, *postInterval = nullptr;
 
     // Accumulate the ranges of hot and cold code in the interval. Note that
     // we are only comparing with the single hot range found, so the cold code
     // may contain separate hot ranges.
     Vector<LiveInterval::Range, 1, SystemAllocPolicy> hotList, coldList;
     for (size_t i = 0; i < interval->numRanges(); i++) {
         LiveInterval::Range hot, coldPre, coldPost;
         interval->getRange(i)->intersect(hotRange, &coldPre, &hot, &coldPost);
 
         if (!hot.empty() && !hotInterval->addRange(hot.from, hot.to))
             return false;
 
         if (!coldPre.empty()) {
             if (!preInterval)
-                preInterval = new LiveInterval(interval->vreg(), 0);
+                preInterval = new(alloc()) LiveInterval(interval->vreg(), 0);
             if (!preInterval->addRange(coldPre.from, coldPre.to))
                 return false;
         }
 
         if (!coldPost.empty()) {
             if (!postInterval)
-                postInterval = new LiveInterval(interval->vreg(), 0);
+                postInterval = new(alloc()) LiveInterval(interval->vreg(), 0);
             if (!postInterval->addRange(coldPost.from, coldPost.to))
                 return false;
         }
     }
 
     JS_ASSERT(preInterval || postInterval);
     JS_ASSERT(hotInterval->numRanges());
 
@@ -1582,18 +1582,18 @@ BacktrackingAllocator::trySplitAfterLast
         }
     }
 
     if (!lastRegisterFrom.pos() || lastRegisterFrom == lastUse) {
         // Can't trim non-register uses off the end by splitting.
         return true;
     }
 
-    LiveInterval *preInterval = new LiveInterval(interval->vreg(), 0);
-    LiveInterval *postInterval = new LiveInterval(interval->vreg(), 0);
+    LiveInterval *preInterval = new(alloc()) LiveInterval(interval->vreg(), 0);
+    LiveInterval *postInterval = new(alloc()) LiveInterval(interval->vreg(), 0);
 
     for (size_t i = 0; i < interval->numRanges(); i++) {
         const LiveInterval::Range *range = interval->getRange(i);
 
         if (range->from < lastRegisterTo) {
             CodePosition to = (range->to <= lastRegisterTo) ? range->to : lastRegisterTo;
             if (!preInterval->addRange(range->from, to))
                 return false;
@@ -1626,17 +1626,17 @@ BacktrackingAllocator::splitAtAllRegiste
     uint32_t vreg = interval->vreg();
 
     // If this LiveInterval is the result of an earlier split which created a
     // spill interval, that spill interval covers the whole range, so we don't
     // need to create a new one.
     bool spillIntervalIsNew = false;
     LiveInterval *spillInterval = interval->spillInterval();
     if (!spillInterval) {
-        spillInterval = new LiveInterval(vreg, 0);
+        spillInterval = new(alloc()) LiveInterval(vreg, 0);
         spillIntervalIsNew = true;
     }
 
     CodePosition spillStart = interval->start();
     if (isRegisterDefinition(interval)) {
         // Treat the definition of the interval as a register use so that it
         // can be split and spilled ASAP.
         CodePosition from = interval->start();
@@ -1656,35 +1656,35 @@ BacktrackingAllocator::splitAtAllRegiste
     }
 
     for (UsePositionIterator iter(interval->usesBegin());
          iter != interval->usesEnd();
          iter++)
     {
         LInstruction *ins = insData[iter->pos].ins();
         if (iter->pos < spillStart) {
-            newIntervals.back()->addUse(new UsePosition(iter->use, iter->pos));
+            newIntervals.back()->addUse(new(alloc()) UsePosition(iter->use, iter->pos));
         } else if (isRegisterUse(iter->use, ins)) {
             // For register uses which are not useRegisterAtStart, pick an
             // interval that covers both the instruction's input and output, so
             // that the register is not reused for an output.
             CodePosition from = inputOf(ins);
             CodePosition to = iter->pos.next();
 
             // Use the same interval for duplicate use positions, except when
             // the uses are fixed (they may require incompatible registers).
             if (newIntervals.empty() || newIntervals.back()->end() != to || iter->use->policy() == LUse::FIXED) {
                 if (!addLiveInterval(newIntervals, vreg, spillInterval, from, to))
                     return false;
             }
 
-            newIntervals.back()->addUse(new UsePosition(iter->use, iter->pos));
+            newIntervals.back()->addUse(new(alloc()) UsePosition(iter->use, iter->pos));
         } else {
             JS_ASSERT(spillIntervalIsNew);
-            spillInterval->addUse(new UsePosition(iter->use, iter->pos));
+            spillInterval->addUse(new(alloc()) UsePosition(iter->use, iter->pos));
         }
     }
 
     if (spillIntervalIsNew && !newIntervals.append(spillInterval))
         return false;
 
     return split(interval, newIntervals) && requeueIntervals(newIntervals);
 }
@@ -1718,43 +1718,43 @@ BacktrackingAllocator::splitAcrossCalls(
     uint32_t vreg = interval->vreg();
 
     // If this LiveInterval is the result of an earlier split which created a
     // spill interval, that spill interval covers the whole range, so we don't
     // need to create a new one.
     bool spillIntervalIsNew = false;
     LiveInterval *spillInterval = interval->spillInterval();
     if (!spillInterval) {
-        spillInterval = new LiveInterval(vreg, 0);
+        spillInterval = new(alloc()) LiveInterval(vreg, 0);
         spillIntervalIsNew = true;
 
         for (size_t i = 0; i < interval->numRanges(); i++) {
             const LiveInterval::Range *range = interval->getRange(i);
             CodePosition from = range->from < spillStart ? spillStart : range->from;
             if (!spillInterval->addRange(from, range->to))
                 return false;
         }
     }
 
     LiveIntervalVector newIntervals;
 
     CodePosition lastRegisterUse;
     if (spillStart != interval->start()) {
-        LiveInterval *newInterval = new LiveInterval(vreg, 0);
+        LiveInterval *newInterval = new(alloc()) LiveInterval(vreg, 0);
         newInterval->setSpillInterval(spillInterval);
         if (!newIntervals.append(newInterval))
             return false;
         lastRegisterUse = interval->start();
     }
 
     int activeCallPosition = callPositions.length() - 1;
     for (UsePositionIterator iter(interval->usesBegin()); iter != interval->usesEnd(); iter++) {
         LInstruction *ins = insData[iter->pos].ins();
         if (iter->pos < spillStart) {
-            newIntervals.back()->addUse(new UsePosition(iter->use, iter->pos));
+            newIntervals.back()->addUse(new(alloc()) UsePosition(iter->use, iter->pos));
         } else if (isRegisterUse(iter->use, ins)) {
             bool useNewInterval = false;
             if (lastRegisterUse.pos() == 0) {
                 useNewInterval = true;
             } else {
                 // Place this register use into a different interval from the
                 // last one if there are any calls between the two uses or if
                 // the register uses are in different subranges of the original
@@ -1774,26 +1774,26 @@ BacktrackingAllocator::splitAcrossCalls(
                         if (range->from <= lastRegisterUse && range->to <= iter->pos) {
                             useNewInterval = true;
                             break;
                         }
                     }
                 }
             }
             if (useNewInterval) {
-                LiveInterval *newInterval = new LiveInterval(vreg, 0);
+                LiveInterval *newInterval = new(alloc()) LiveInterval(vreg, 0);
                 newInterval->setSpillInterval(spillInterval);
                 if (!newIntervals.append(newInterval))
                     return false;
             }
-            newIntervals.back()->addUse(new UsePosition(iter->use, iter->pos));
+            newIntervals.back()->addUse(new(alloc()) UsePosition(iter->use, iter->pos));
             lastRegisterUse = iter->pos;
         } else {
             JS_ASSERT(spillIntervalIsNew);
-            spillInterval->addUse(new UsePosition(iter->use, iter->pos));
+            spillInterval->addUse(new(alloc()) UsePosition(iter->use, iter->pos));
         }
     }
 
     // Compute ranges for each new interval that cover all its uses.
     for (size_t i = 0; i < newIntervals.length(); i++) {
         LiveInterval *newInterval = newIntervals[i];
         CodePosition start, end;
         if (i == 0 && spillStart != interval->start()) {
--- a/js/src/jit/IonAllocPolicy.h
+++ b/js/src/jit/IonAllocPolicy.h
@@ -3,16 +3,17 @@
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef jit_IonAllocPolicy_h
 #define jit_IonAllocPolicy_h
 
 #include "mozilla/GuardObjects.h"
+#include "mozilla/TypeTraits.h"
 
 #include "jscntxt.h"
 
 #include "ds/LifoAlloc.h"
 #include "jit/InlineList.h"
 #include "jit/Ion.h"
 
 namespace js {
@@ -128,19 +129,23 @@ class AutoIonContextAlloc
     }
 };
 
 struct TempObject
 {
     inline void *operator new(size_t nbytes) {
         return GetIonContext()->temp->allocateInfallible(nbytes);
     }
-
-  public:
-    inline void *operator new(size_t nbytes, void *pos) {
+    inline void *operator new(size_t nbytes, TempAllocator &alloc) {
+        return alloc.allocateInfallible(nbytes);
+    }
+    template <class T>
+    inline void *operator new(size_t nbytes, T *pos) {
+        static_assert(mozilla::IsConvertible<T*, TempObject*>::value,
+                      "Placement new argument type must inherit from TempObject");
         return pos;
     }
 };
 
 template <typename T>
 class TempObjectPool
 {
     InlineForwardList<T> freed_;
--- a/js/src/jit/LinearScan.cpp
+++ b/js/src/jit/LinearScan.cpp
@@ -645,17 +645,17 @@ LinearScanAllocator::splitInterval(LiveI
     JS_ASSERT(interval->start() < pos && pos < interval->end());
 
     LinearScanVirtualRegister *reg = &vregs[interval->vreg()];
 
     // "Bogus" intervals cannot be split.
     JS_ASSERT(reg);
 
     // Do the split.
-    LiveInterval *newInterval = new LiveInterval(interval->vreg(), interval->index() + 1);
+    LiveInterval *newInterval = new(alloc()) LiveInterval(interval->vreg(), interval->index() + 1);
     if (!interval->splitFrom(pos, newInterval))
         return false;
 
     JS_ASSERT(interval->numRanges() > 0);
     JS_ASSERT(newInterval->numRanges() > 0);
 
     if (!reg->addInterval(newInterval))
         return false;
--- a/js/src/jit/LiveRangeAllocator.cpp
+++ b/js/src/jit/LiveRangeAllocator.cpp
@@ -401,53 +401,53 @@ LiveRangeAllocator<VREG>::init()
 
     liveIn = mir->allocate<BitSet*>(graph.numBlockIds());
     if (!liveIn)
         return false;
 
     // Initialize fixed intervals.
     for (size_t i = 0; i < AnyRegister::Total; i++) {
         AnyRegister reg = AnyRegister::FromCode(i);
-        LiveInterval *interval = new LiveInterval(0);
+        LiveInterval *interval = new(alloc()) LiveInterval(0);
         interval->setAllocation(LAllocation(reg));
         fixedIntervals[i] = interval;
     }
 
-    fixedIntervalsUnion = new LiveInterval(0);
+    fixedIntervalsUnion = new(alloc()) LiveInterval(0);
 
     if (!vregs.init(mir, graph.numVirtualRegisters()))
         return false;
 
     // Build virtual register objects
     for (size_t i = 0; i < graph.numBlocks(); i++) {
         if (mir->shouldCancel("Create data structures (main loop)"))
             return false;
 
         LBlock *block = graph.getBlock(i);
         for (LInstructionIterator ins = block->begin(); ins != block->end(); ins++) {
             for (size_t j = 0; j < ins->numDefs(); j++) {
                 LDefinition *def = ins->getDef(j);
                 if (def->policy() != LDefinition::PASSTHROUGH) {
-                    if (!vregs[def].init(block, *ins, def, /* isTemp */ false))
+                    if (!vregs[def].init(alloc(), block, *ins, def, /* isTemp */ false))
                         return false;
                 }
             }
 
             for (size_t j = 0; j < ins->numTemps(); j++) {
                 LDefinition *def = ins->getTemp(j);
                 if (def->isBogusTemp())
                     continue;
-                if (!vregs[def].init(block, *ins, def, /* isTemp */ true))
+                if (!vregs[def].init(alloc(), block, *ins, def, /* isTemp */ true))
                     return false;
             }
         }
         for (size_t j = 0; j < block->numPhis(); j++) {
             LPhi *phi = block->getPhi(j);
             LDefinition *def = phi->getDef(0);
-            if (!vregs[def].init(block, phi, def, /* isTemp */ false))
+            if (!vregs[def].init(alloc(), block, phi, def, /* isTemp */ false))
                 return false;
         }
     }
 
     return true;
 }
 
 static void
@@ -654,26 +654,26 @@ LiveRangeAllocator<VREG>::buildLivenessI
                     if (!vregs[temp].getInterval(0)->addRangeAtHead(from, to))
                         return false;
                 }
             }
 
             DebugOnly<bool> hasUseRegister = false;
             DebugOnly<bool> hasUseRegisterAtStart = false;
 
-            for (LInstruction::InputIterator alloc(**ins); alloc.more(); alloc.next()) {
-                if (alloc->isUse()) {
-                    LUse *use = alloc->toUse();
+            for (LInstruction::InputIterator inputAlloc(**ins); inputAlloc.more(); inputAlloc.next()) {
+                if (inputAlloc->isUse()) {
+                    LUse *use = inputAlloc->toUse();
 
                     // The first instruction, LLabel, has no uses.
                     JS_ASSERT(inputOf(*ins) > outputOf(block->firstId()));
 
                     // Call uses should always be at-start or fixed, since the fixed intervals
                     // use all registers.
-                    JS_ASSERT_IF(ins->isCall() && !alloc.isSnapshotInput(),
+                    JS_ASSERT_IF(ins->isCall() && !inputAlloc.isSnapshotInput(),
                                  use->isFixedRegister() || use->usedAtStart());
 
 #ifdef DEBUG
                     // Don't allow at-start call uses if there are temps of the same kind,
                     // so that we don't assign the same register.
                     if (ins->isCall() && use->usedAtStart()) {
                         for (size_t i = 0; i < ins->numTemps(); i++)
                             JS_ASSERT(vregs[ins->getTemp(i)].isDouble() != vregs[use].isDouble());
@@ -725,17 +725,17 @@ LiveRangeAllocator<VREG>::buildLivenessI
                                     to = inputOf(*ins);
                             }
                         }
                     }
 
                     LiveInterval *interval = vregs[use].getInterval(0);
                     if (!interval->addRangeAtHead(inputOf(block->firstId()), forLSRA ? to : to.next()))
                         return false;
-                    interval->addUse(new UsePosition(use, to));
+                    interval->addUse(new(alloc()) UsePosition(use, to));
 
                     live->insert(use->virtualRegister());
                 }
             }
         }
 
         // Phis have simultaneous assignment semantics at block begin, so at
         // the beginning of the block we can be sure that liveIn does not
--- a/js/src/jit/LiveRangeAllocator.h
+++ b/js/src/jit/LiveRangeAllocator.h
@@ -383,23 +383,25 @@ class VirtualRegister
 
     // Whether def_ is a temp or an output.
     bool isTemp_ : 1;
 
     void operator=(const VirtualRegister &) MOZ_DELETE;
     VirtualRegister(const VirtualRegister &) MOZ_DELETE;
 
   public:
-    bool init(LBlock *block, LInstruction *ins, LDefinition *def, bool isTemp) {
+    bool init(TempAllocator &alloc, LBlock *block, LInstruction *ins, LDefinition *def,
+              bool isTemp)
+    {
         JS_ASSERT(block && !block_);
         block_ = block;
         ins_ = ins;
         def_ = def;
         isTemp_ = isTemp;
-        LiveInterval *initial = new LiveInterval(def->virtualRegister(), 0);
+        LiveInterval *initial = new(alloc) LiveInterval(def->virtualRegister(), 0);
         if (!initial)
             return false;
         return intervals_.append(initial);
     }
     LBlock *block() {
         return block_;
     }
     LInstruction *ins() {
--- a/js/src/jit/RegisterAllocator.h
+++ b/js/src/jit/RegisterAllocator.h
@@ -313,16 +313,20 @@ class RegisterAllocator
             allRegisters_.take(AnyRegister(GlobalReg));
             allRegisters_.take(AnyRegister(NANReg));
         }
 #endif
     }
 
     bool init();
 
+    TempAllocator &alloc() const {
+        return mir->temp();
+    }
+
     CodePosition outputOf(uint32_t pos) const {
         return CodePosition(pos, CodePosition::OUTPUT);
     }
     CodePosition outputOf(const LInstruction *ins) const {
         return CodePosition(ins->id(), CodePosition::OUTPUT);
     }
     CodePosition inputOf(uint32_t pos) const {
         return CodePosition(pos, CodePosition::INPUT);