Fix LSRA assert triggered by DivI on ARM (bug 715460, r=dvander,mjrosenb)
author Jan de Mooij <jdemooij@mozilla.com>
Thu, 12 Jan 2012 10:54:45 +0100
changeset 105560 dac569ab767c4cec743ccf198e4a50ca8066f182
parent 105559 ee228384056cc7c6e647c29656c301d44de120f5
child 105561 41a9e0d849ea49b063d3919d88e33ae5693b551a
push id 1
push user root
push date Mon, 20 Oct 2014 17:29:22 +0000
reviewers dvander, mjrosenb
bugs 715460
milestone 12.0a1
js/src/ion/GreedyAllocator.cpp
js/src/ion/LinearScan.cpp
js/src/ion/LinearScan.h
js/src/ion/arm/LIR-arm.h
js/src/ion/arm/Lowering-arm.cpp
js/src/ion/x64/Lowering-x64.cpp
js/src/ion/x86/Lowering-x86.cpp
js/src/jit-test/tests/ion/bug715460.js
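
The assert this patch fixes comes from the way LDivI is lowered on ARM: both operands are pinned to fixed registers (r0 and r1), so when the numerator and denominator are the same virtual register, as in the y / y case covered by the new test below, one live interval ends up demanding two different fixed registers at the same code position, which linear scan cannot satisfy. The patch detects this with LiveInterval::fixedUseAt and resolves it with LinearScanAllocator::copyFixedRegister. The following is only a simplified, stand-alone sketch of the detection idea; the types and names are invented for the example and are not the IonMonkey API.

#include <cstddef>
#include <stdint.h>
#include <vector>

struct FixedUse {
    uint32_t vreg;   // virtual register id
    uint32_t pos;    // code position of the use
    uint32_t hwreg;  // fixed hardware register demanded at that position
};

// Returns true if the same virtual register is demanded in two different
// fixed hardware registers at the same code position (e.g. r0 and r1 when
// both operands of an ARM division are the same value).
static bool
HasFixedRegisterConflict(const std::vector<FixedUse> &uses)
{
    for (size_t i = 0; i < uses.size(); i++) {
        for (size_t j = i + 1; j < uses.size(); j++) {
            if (uses[i].vreg == uses[j].vreg &&
                uses[i].pos == uses[j].pos &&
                uses[i].hwreg != uses[j].hwreg)
            {
                return true;
            }
        }
    }
    return false;
}
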
--- a/js/src/ion/GreedyAllocator.cpp
+++ b/js/src/ion/GreedyAllocator.cpp
@@ -177,21 +177,28 @@ GreedyAllocator::prescanUses(LInstructio
 {
     for (size_t i = 0; i < ins->numOperands(); i++) {
         LAllocation *a = ins->getOperand(i);
         if (!a->isUse()) {
             JS_ASSERT(a->isConstant());
             continue;
         }
 
-        VirtualRegister *vr = getVirtualRegister(a->toUse());
-        if (a->toUse()->policy() == LUse::FIXED)
-            disallowed.add(GetFixedRegister(vr->def, a->toUse()));
-        else if (vr->hasRegister())
+        LUse *use = a->toUse();
+        VirtualRegister *vr = getVirtualRegister(use);
+        if (use->policy() == LUse::FIXED) {
+            // If this use is marked as at-start, a def or temp may use the
+            // same register, so we have to use the unchecked version.
+            if (use->usedAtStart())
+                disallowed.addUnchecked(GetFixedRegister(vr->def, use));
+            else
+                disallowed.add(GetFixedRegister(vr->def, use));
+        } else if (vr->hasRegister()) {
             discouraged.addUnchecked(vr->reg());
+        }
     }
     return true;
 }
 
 static inline bool
 IsNunbox(LDefinition::Type type)
 {
 #ifdef JS_NUNBOX32
--- a/js/src/ion/LinearScan.cpp
+++ b/js/src/ion/LinearScan.cpp
@@ -202,16 +202,27 @@ LiveInterval::intersect(LiveInterval *ot
                 return ranges_[i].from;
             j++;
         }
     }
 
     return CodePosition::MIN;
 }
 
+bool
+LiveInterval::requireSpill(const LiveInterval *other) const
+{
+    // Spill intervals at call instructions force spills of everything
+    // except temps and defs of this call.
+    JS_ASSERT(isSpillInterval());
+    JS_ASSERT(!other->isSpillInterval());
+    return (other->index() > 0 || start() != other->start() ||
+            !other->reg()->ins()->isCall());
+}
+
 /*
  * This function takes the callee interval and moves all ranges following or
  * including provided position to the target interval. Additionally, if a
  * range in the callee interval spans the given position, it is split and the
  * latter half is placed in the target interval.
  *
  * This function should only be called if it is known that the interval should
  * actually be split (and, presumably, a move inserted). As such, it is an
@@ -306,16 +317,32 @@ LiveInterval::nextUseAfter(CodePosition 
     for (UsePositionIterator usePos(usesBegin()); usePos != usesEnd(); usePos++) {
         if (usePos->pos >= after && usePos->use->policy() != LUse::KEEPALIVE)
             return *usePos;
     }
     return NULL;
 }
 
 /*
+ * Return the UsePosition with FIXED policy at position pos, or NULL
+ * if there is no such use.
+ */
+UsePosition *
+LiveInterval::fixedUseAt(CodePosition pos)
+{
+    for (UsePositionIterator usePos(usesBegin()); usePos != usesEnd(); usePos++) {
+        if (usePos->pos > pos)
+            return NULL;
+        if (usePos->pos == pos && usePos->use->policy() == LUse::FIXED)
+            return *usePos;
+    }
+    return NULL;
+}
+
+/*
  * This function locates the first "real" use of this interval that follows
  * the given code position. Non-"real" uses are currently just snapshots,
  * which keep virtual registers alive but do not result in the
 * generation of code that uses them.
  */
 CodePosition
 LiveInterval::nextUsePosAfter(CodePosition after)
 {
@@ -389,30 +416,91 @@ LinearScanAllocator::createDataStructure
                 return false;
         }
     }
 
     return true;
 }
 
 /*
- * Allocates a bogus interval covering the input half of the given instruction,
- * such that the given requirement is met for that interval, but unused.
+ * Allocates a bogus interval, such that the given requirement is met for
+ * that interval, but unused.
  */
 void
-LinearScanAllocator::addSpillInterval(LInstruction *ins, const Requirement &req)
+LinearScanAllocator::addSpillInterval(CodePosition pos, const Requirement &req)
 {
     LiveInterval *bogus = new LiveInterval(NULL, 0);
 
-    bogus->addRange(inputOf(ins), outputOf(ins));
+    bogus->addRange(pos, pos.next());
     bogus->setRequirement(req);
 
     unhandled.enqueueAtHead(bogus);
 }
 
+static inline AnyRegister
+GetFixedRegister(LDefinition *def, LUse *use)
+{
+    return def->type() == LDefinition::DOUBLE
+           ? AnyRegister(FloatRegister::FromCode(use->registerCode()))
+           : AnyRegister(Register::FromCode(use->registerCode()));
+}
+
+static inline bool
+IsNunbox(VirtualRegister *vreg)
+{
+#ifdef JS_NUNBOX32
+    return (vreg->type() == LDefinition::TYPE ||
+            vreg->type() == LDefinition::PAYLOAD);
+#else
+    return false;
+#endif
+}
+
+static inline bool
+IsTraceable(VirtualRegister *reg)
+{
+    if (reg->type() == LDefinition::OBJECT)
+        return true;
+#ifdef JS_PUNBOX64
+    if (reg->type() == LDefinition::BOX)
+        return true;
+#endif
+    return false;
+}
+
+/*
+ * Handle instructions which want the same value in two different fixed registers
+ * by allocating a spill interval for the second register and inserting a move
+ * from the first fixed register to the second fixed register.
+ */
+bool
+LinearScanAllocator::copyFixedRegister(LInstruction *ins, CodePosition pos, LUse *src, LUse *dest)
+{
+    JS_ASSERT(src->virtualRegister() == dest->virtualRegister());
+    JS_ASSERT(!IsTraceable(&vregs[src]));
+    JS_ASSERT(!IsNunbox(&vregs[src]));
+
+    AnyRegister srcReg = GetFixedRegister(vregs[src].def(), src);
+    AnyRegister destReg = GetFixedRegister(vregs[dest].def(), dest);
+    JS_ASSERT(srcReg != destReg);
+
+    // Prevent other intervals from using this fixed register.
+    addSpillInterval(pos, Requirement(LAllocation(destReg)));
+
+    // Insert a move to this fixed register.
+    LAllocation *srcAlloc = LAllocation::New(srcReg);
+    LAllocation *destAlloc = LAllocation::New(destReg);
+
+    if (!moveBeforeAlloc(inputOf(ins), srcAlloc, destAlloc))
+        return false;
+
+    *static_cast<LAllocation *>(dest) = *destAlloc;
+    return true;
+}
+
 /*
  * This function builds up liveness intervals for all virtual registers
  * defined in the function. Additionally, it populates the liveIn array with
  * information about which registers are live at the beginning of a block, to
  * aid resolution and reification in a later phase.
  *
  * The algorithm is based on the one published in:
  *
@@ -472,17 +560,17 @@ LinearScanAllocator::buildLivenessInfo()
         }
 
         // Shorten the front end of live intervals for live variables to their
         // point of definition, if found.
         for (LInstructionReverseIterator ins = block->rbegin(); ins != block->rend(); ins++) {
             // Calls may clobber registers, so force a spill and reload around the callsite.
             if (ins->isCall()) {
                 for (AnyRegisterIterator iter(RegisterSet::All()); iter.more(); iter++)
-                    addSpillInterval(*ins, Requirement(LAllocation(*iter)));
+                    addSpillInterval(inputOf(*ins), Requirement(LAllocation(*iter)));
             }
 
             for (size_t i = 0; i < ins->numDefs(); i++) {
                 if (ins->getDef(i)->policy() != LDefinition::PASSTHROUGH) {
                     LDefinition *def = ins->getDef(i);
                     CodePosition from(inputOf(*ins));
 
                     // Ensure that if there aren't any uses, there's at least
@@ -509,16 +597,27 @@ LinearScanAllocator::buildLivenessInfo()
                     JS_ASSERT(inputOf(*ins) > outputOf(block->firstId()));
 
                     // Call uses should always be at-start, since all registers (except temps
                     // and defs) are spilled.
                     JS_ASSERT_IF(ins->isCall() && !alloc.isSnapshotInput(), use->usedAtStart());
 
                     CodePosition endPos = use->usedAtStart() ? inputOf(*ins) : outputOf(*ins);
                     LiveInterval *interval = vregs[use].getInterval(0);
+
+                    // Handle the (very rare) case where an instruction wants the same value in
+                    // two different fixed registers.
+                    if (use->policy() == LUse::FIXED) {
+                        if (UsePosition *other = interval->fixedUseAt(endPos)) {
+                            if (!copyFixedRegister(*ins, endPos.previous(), other->use, use))
+                                return false;
+                            continue;
+                        }
+                    }
+
                     interval->addRange(inputOf(block->firstId()), endPos);
                     interval->addUse(new UsePosition(use, endPos));
 
                     live->insert(use->virtualRegister());
                 }
             }
         }
 
@@ -973,39 +1072,16 @@ LinearScanAllocator::reifyAllocations()
     }} // Iteration over virtual register intervals.
 
     // Set the graph overall stack height
     graph.setLocalSlotCount(stackSlotAllocator.stackHeight());
 
     return true;
 }
 
-static inline bool
-IsNunbox(VirtualRegister *vreg)
-{
-#ifdef JS_NUNBOX32
-    return (vreg->type() == LDefinition::TYPE ||
-            vreg->type() == LDefinition::PAYLOAD);
-#else
-    return false;
-#endif
-}
-
-static inline bool
-IsTraceable(VirtualRegister *reg)
-{
-    if (reg->type() == LDefinition::OBJECT)
-        return true;
-#ifdef JS_PUNBOX64
-    if (reg->type() == LDefinition::BOX)
-        return true;
-#endif
-    return false;
-}
-
 // Finds the first safepoint that is within range of an interval.
 size_t
 LinearScanAllocator::findFirstSafepoint(LiveInterval *interval, size_t startFrom)
 {
     size_t i = startFrom;
     for (; i < graph.numSafepoints(); i++) {
         LInstruction *ins = graph.getSafepoint(i);
         if (interval->start() <= inputOf(ins))
@@ -1824,22 +1900,17 @@ LinearScanAllocator::setIntervalRequirem
                 if (!registerOp || usePos->pos.ins() < registerOp->pos.ins())
                     registerOp = *usePos;
             }
         }
     }
 
     if (fixedOp) {
         // Intervals with a fixed use now get a FIXED requirement/hint
-        AnyRegister required;
-        if (reg->isDouble())
-            required = AnyRegister(FloatRegister::FromCode(fixedOp->use->registerCode()));
-        else
-            required = AnyRegister(Register::FromCode(fixedOp->use->registerCode()));
-
+        AnyRegister required = GetFixedRegister(reg->def(), fixedOp->use);
         if (fixedOpIsHint)
             interval->setHint(Requirement(LAllocation(required), fixedOp->pos));
         else
             interval->setRequirement(Requirement(LAllocation(required)));
     } else if (registerOp) {
         // Intervals with register uses get a REGISTER hint. We may have already
         // assigned a SAME_AS hint, make sure we don't overwrite it with a weaker
         // hint.
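
To restate what copyFixedRegister above does in isolation: the colliding value keeps its interval requirement on the first fixed register only; each additional fixed use of the same value at the same position is rewritten into that fixed allocation directly, a move from the first register is inserted just before the instruction, and a one-position spill interval reserves the destination register so nothing else is allocated into it. Below is a minimal stand-alone sketch of that shape, with register numbers standing in for AnyRegister and all names invented for the example.

#include <cstddef>
#include <utility>
#include <vector>

// For one value demanded in several fixed registers at the same position,
// keep the first register as the interval's requirement and turn every
// further requirement into a (from, to) move inserted before the
// instruction. Each "to" register is also the one a bogus spill interval
// would reserve at that position.
static std::vector<std::pair<int, int> >
MovesForDuplicateFixedUses(const std::vector<int> &fixedRegsForSameValue)
{
    std::vector<std::pair<int, int> > moves;
    for (size_t i = 1; i < fixedRegsForSameValue.size(); i++) {
        // e.g. for an ARM division of a value by itself: from r0 to r1.
        moves.push_back(std::make_pair(fixedRegsForSameValue[0],
                                       fixedRegsForSameValue[i]));
    }
    return moves;
}
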
--- a/js/src/ion/LinearScan.h
+++ b/js/src/ion/LinearScan.h
@@ -331,28 +331,22 @@ class LiveInterval
         hint_ = hint;
     }
     bool isSpill() const {
         return alloc_.isStackSlot();
     }
     bool isSpillInterval() const {
         return !reg_;
     }
-    bool requireSpill(const LiveInterval *other) const {
-        // Spill intervals at call instructions force spills of everything
-        // except temps and defs of this call.
-        JS_ASSERT(isSpillInterval());
-        JS_ASSERT(!other->isSpillInterval());
-        return (other->index() > 0 || start() != other->start());
-    }
-
+    bool requireSpill(const LiveInterval *other) const;
     bool splitFrom(CodePosition pos, LiveInterval *after);
 
     void addUse(UsePosition *use);
     UsePosition *nextUseAfter(CodePosition pos);
+    UsePosition *fixedUseAt(CodePosition pos);
     CodePosition nextUsePosAfter(CodePosition pos);
     CodePosition firstIncompatibleUse(LAllocation alloc);
 
     UsePositionIterator usesBegin() const {
         return uses_.begin();
     }
 
     UsePositionIterator usesEnd() const {
@@ -607,17 +601,18 @@ class LinearScanAllocator
     bool canCoexist(LiveInterval *a, LiveInterval *b);
     LMoveGroup *getMoveGroupBefore(CodePosition pos);
     LMoveGroup *getMoveGroupAfter(CodePosition pos);
     bool addMove(LMoveGroup *moves, LiveInterval *from, LiveInterval *to);
     bool moveBefore(CodePosition pos, LiveInterval *from, LiveInterval *to);
     bool moveBeforeAlloc(CodePosition pos, LAllocation *from, LAllocation *to);
     bool moveAfter(CodePosition pos, LiveInterval *from, LiveInterval *to);
     void setIntervalRequirement(LiveInterval *interval);
-    void addSpillInterval(LInstruction *ins, const Requirement &req);
+    void addSpillInterval(CodePosition pos, const Requirement &req);
+    bool copyFixedRegister(LInstruction *ins, CodePosition pos, LUse *src, LUse *dest);
     size_t findFirstSafepoint(LiveInterval *interval, size_t firstSafepoint);
 
 #ifdef DEBUG
     void validateIntervals();
     void validateAllocations();
 #else
     inline void validateIntervals() { };
     inline void validateAllocations() { };
--- a/js/src/ion/arm/LIR-arm.h
+++ b/js/src/ion/arm/LIR-arm.h
@@ -126,32 +126,24 @@ class LDouble : public LInstructionHelpe
 // being, the link register is not marked as trashed because we never allocate
 // to the link register.
 class LDivI : public LBinaryMath<3>
 {
   public:
     LIR_HEADER(DivI);
 
     LDivI(const LAllocation &lhs, const LAllocation &rhs,
-          const LDefinition &temp1, const LDefinition &temp2
-#if 0
-          , const LDefinition &temp3
-#endif
-) {
+          const LDefinition &temp1, const LDefinition &temp2,
+          const LDefinition &temp3)
+    {
         setOperand(0, lhs);
         setOperand(1, rhs);
         setTemp(0, temp1);
         setTemp(1, temp2);
-#if 0
         setTemp(2, temp3);
-#endif
-    }
-
-    const LDefinition *remainder() {
-        return getTemp(0);
     }
 };
 // Takes a tableswitch with an integer to decide
 class LTableSwitch : public LInstructionHelper<0, 1, 1>
 {
   public:
     LIR_HEADER(TableSwitch);
 
--- a/js/src/ion/arm/Lowering-arm.cpp
+++ b/js/src/ion/arm/Lowering-arm.cpp
@@ -270,22 +270,21 @@ LIRGeneratorARM::lowerForShift(LInstruct
     }
     return define(ins, mir,
                   LDefinition(LDefinition::TypeFrom(mir->type()), LDefinition::DEFAULT));
 }
 
 bool
 LIRGeneratorARM::lowerDivI(MDiv *div)
 {
-    LDivI *lir = new LDivI(useFixed(div->lhs(), r0), useFixed(div->rhs(), r1),
-                           tempFixed(r2), tempFixed(r3)/*, tempFixed(lr)*/);
-    
-    return define(lir, div,
-                  LDefinition(LDefinition::TypeFrom(div->type()), LDefinition::DEFAULT))
-    && assignSnapshot(lir);
+    // Note: r1 has both a use and a temp, so that the C call can clobber it.
+    LDivI *lir = new LDivI(useFixedAtStart(div->lhs(), r0), useFixedAtStart(div->rhs(), r1),
+                           tempFixed(r1), tempFixed(r2), tempFixed(r3)/*, tempFixed(lr)*/);
+
+    return defineFixed(lir, div, LAllocation(AnyRegister(r0))) && assignSnapshot(lir);
 }
 
 bool
 LIRGeneratorARM::lowerMulI(MMul *mul, MDefinition *lhs, MDefinition *rhs)
 {
     LMulI *lir = new LMulI;
     if (mul->fallible() && !assignSnapshot(lir))
         return false;
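
The "r1 has both a use and a temp" note in lowerDivI above reflects that this ARM path performs the division through a call into C, and under the ARM AAPCS calling convention r0-r3, r12 and lr are not preserved across calls. A small, purely illustrative listing of that clobber set follows; the helper-routine name in the comment is only an example and nothing here is part of the IonMonkey API.

#include <cstdio>

// Call-clobbered ("caller-saved") registers under the ARM AAPCS. A C helper
// performing the division (an __aeabi_idiv-style routine, for instance) may
// trash any of these, which is why the lowering pins the operands at-start
// and also claims r1, r2 and r3 (and, per the commented-out tempFixed,
// potentially lr) as fixed temps.
static const char *const kAapcsCallClobbered[] = {
    "r0", "r1", "r2", "r3", "r12", "lr"
};

int main()
{
    for (const char *const reg : kAapcsCallClobbered)
        std::printf("call-clobbered: %s\n", reg);
    return 0;
}
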
--- a/js/src/ion/x64/Lowering-x64.cpp
+++ b/js/src/ion/x64/Lowering-x64.cpp
@@ -168,17 +168,17 @@ LIRGeneratorX64::lowerUntypedPhiInput(MP
 {
     lowerTypedPhiInput(phi, inputPosition, block, lirIndex);
 }
 
 bool
 LIRGeneratorX64::lowerDivI(MDiv *div)
 {
     LDivI *lir = new LDivI(useFixedAtStart(div->lhs(), rax), useRegister(div->rhs()), tempFixed(rdx));
-    return defineReuseInput(lir, div, 0) && assignSnapshot(lir);
+    return defineFixed(lir, div, LAllocation(AnyRegister(rax))) && assignSnapshot(lir);
 }
 
 bool
 LIRGeneratorX64::lowerModI(MMod *mod)
 {
     LModI *lir = new LModI(useFixed(mod->lhs(), rax), useRegister(mod->rhs()));
     return defineFixed(lir, mod, LAllocation(AnyRegister(rdx))) && assignSnapshot(lir);
 }
--- a/js/src/ion/x86/Lowering-x86.cpp
+++ b/js/src/ion/x86/Lowering-x86.cpp
@@ -242,17 +242,17 @@ LIRGeneratorX86::lowerUntypedPhiInput(MP
     type->setOperand(inputPosition, LUse(operand->virtualRegister() + VREG_TYPE_OFFSET, LUse::ANY));
     payload->setOperand(inputPosition, LUse(VirtualRegisterOfPayload(operand), LUse::ANY));
 }
 
 bool
 LIRGeneratorX86::lowerDivI(MDiv *div)
 {
     LDivI *lir = new LDivI(useFixedAtStart(div->lhs(), eax), useRegister(div->rhs()), tempFixed(edx));
-    return defineReuseInput(lir, div, 0) && assignSnapshot(lir);
+    return defineFixed(lir, div, LAllocation(AnyRegister(eax))) && assignSnapshot(lir);
 }
 
 bool
 LIRGeneratorX86::lowerModI(MMod *mod)
 {
     LModI *lir = new LModI(useFixed(mod->lhs(), eax), useRegister(mod->rhs()));
     return defineFixed(lir, mod, LAllocation(AnyRegister(edx))) && assignSnapshot(lir);
 }
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/ion/bug715460.js
@@ -0,0 +1,13 @@
+function f(x) {
+    var a = x;
+    return a / 10;
+}
+for (var i=0; i<100; i++)
+    assertEq(f(i * 10), i);
+
+function g(x) {
+    var y = x + 1;
+    return y / y;
+}
+for (var i=0; i<100; i++)
+    assertEq(g(i), 1);