Merge backout.
author: David Anderson <danderson@mozilla.com>
Mon, 06 Sep 2010 18:22:30 -0700
changeset 74562 495774a53ba64376fe40187684450bd076df09bf
parent 74560 c76ed2a6bb61e195e8d485879312bb8735869318 (current diff)
parent 74561 d964486cd584b1b5871a56dab778651488ef8af6 (diff)
child 74564 0d06f0c22e437821c94a632967080f755e197211
push id: 2
push user: bsmedberg@mozilla.com
push date: Fri, 19 Aug 2011 14:38:13 +0000
milestone: 2.0b6pre
Merge backout.
js/src/methodjit/FrameState.cpp
js/src/methodjit/FrameState.h
--- a/js/src/methodjit/FrameState-inl.h
+++ b/js/src/methodjit/FrameState-inl.h
@@ -656,16 +656,22 @@ inline void
 FrameState::forgetAllRegs(FrameEntry *fe)
 {
     if (fe->type.inRegister())
         forgetReg(fe->type.reg());
     if (fe->data.inRegister())
         forgetReg(fe->data.reg());
 }
 
+inline FrameEntry *
+FrameState::tosFe() const
+{
+    return sp;
+}
+
 inline void
 FrameState::swapInTracker(FrameEntry *lhs, FrameEntry *rhs)
 {
     uint32 li = lhs->trackerIndex();
     uint32 ri = rhs->trackerIndex();
     JS_ASSERT(tracker[li] == lhs);
     JS_ASSERT(tracker[ri] == rhs);
     tracker.entries[ri] = lhs;
--- a/js/src/methodjit/FrameState.cpp
+++ b/js/src/methodjit/FrameState.cpp
@@ -267,26 +267,27 @@ FrameState::storeTo(FrameEntry *fe, Addr
 }
 
 #ifdef DEBUG
 void
 FrameState::assertValidRegisterState() const
 {
     Registers checkedFreeRegs;
 
+    FrameEntry *tos = tosFe();
     for (uint32 i = 0; i < tracker.nentries; i++) {
         FrameEntry *fe = tracker[i];
-        if (fe >= sp)
+        if (fe >= tos)
             continue;
 
         JS_ASSERT(i == fe->trackerIndex());
         JS_ASSERT_IF(fe->isCopy(),
                      fe->trackerIndex() > fe->copyOf()->trackerIndex());
         JS_ASSERT_IF(fe->isCopy(), !fe->type.inRegister() && !fe->data.inRegister());
-        JS_ASSERT_IF(fe->isCopy(), fe->copyOf() < sp);
+        JS_ASSERT_IF(fe->isCopy(), fe->copyOf() < tos);
         JS_ASSERT_IF(fe->isCopy(), fe->copyOf()->isCopied());
 
         if (fe->isCopy())
             continue;
         if (fe->type.inRegister()) {
             checkedFreeRegs.takeReg(fe->type.reg());
             JS_ASSERT(regstate[fe->type.reg()].fe() == fe);
         }
@@ -308,43 +309,45 @@ FrameState::assertValidRegisterState() c
 
 void
 FrameState::syncFancy(Assembler &masm, Registers avail, uint32 resumeAt,
                       FrameEntry *bottom) const
 {
     /* :TODO: can be resumeAt? */
     reifier.reset(&masm, avail, tracker.nentries, bottom);
 
+    FrameEntry *tos = tosFe();
     for (uint32 i = resumeAt; i < tracker.nentries; i--) {
         FrameEntry *fe = tracker[i];
-        if (fe >= sp)
+        if (fe >= tos)
             continue;
 
         reifier.sync(fe);
     }
 }
 
 void
 FrameState::sync(Assembler &masm, Uses uses) const
 {
     /*
      * Keep track of free registers using a bitmask. If we have to drop into
      * syncFancy(), then this mask will help avoid eviction.
      */
     Registers avail(freeRegs);
     Registers temp(Registers::TempRegs);
 
-    FrameEntry *bottom = sp - uses.nuses;
+    FrameEntry *tos = tosFe();
+    FrameEntry *bottom = tos - uses.nuses;
 
     if (inTryBlock)
         bottom = NULL;
 
     for (uint32 i = tracker.nentries - 1; i < tracker.nentries; i--) {
         FrameEntry *fe = tracker[i];
-        if (fe >= sp)
+        if (fe >= tos)
             continue;
 
         Address address = addressOf(fe);
 
         if (!fe->isCopy()) {
             /* Keep track of registers that can be clobbered. */
             if (fe->data.inRegister())
                 avail.putReg(fe->data.reg());
@@ -389,24 +392,25 @@ FrameState::sync(Assembler &masm, Uses u
         }
     }
 }
 
 void
 FrameState::syncAndKill(Registers kill, Uses uses)
 {
     /* Backwards, so we can allocate registers to backing slots better. */
-    FrameEntry *bottom = sp - uses.nuses;
+    FrameEntry *tos = tosFe();
+    FrameEntry *bottom = tos - uses.nuses;
 
     if (inTryBlock)
         bottom = NULL;
 
     for (uint32 i = tracker.nentries - 1; i < tracker.nentries; i--) {
         FrameEntry *fe = tracker[i];
-        if (fe >= sp)
+        if (fe >= tos)
             continue;
 
         Address address = addressOf(fe);
         FrameEntry *backing = fe;
         if (fe->isCopy()) {
             if (!inTryBlock && fe < bottom)
                 continue;
             backing = fe->copyOf();
@@ -445,21 +449,22 @@ FrameState::syncAndKill(Registers kill, 
             fe->type.setMemory();
         }
     }
 }
 
 void
 FrameState::merge(Assembler &masm, Changes changes) const
 {
+    FrameEntry *tos = tosFe();
     Registers temp(Registers::TempRegs);
 
     for (uint32 i = 0; i < tracker.nentries; i++) {
         FrameEntry *fe = tracker[i];
-        if (fe >= sp)
+        if (fe >= tos)
             continue;
 
         /* Copies do not have registers. */
         if (fe->isCopy()) {
             JS_ASSERT(!fe->data.inRegister());
             JS_ASSERT(!fe->type.inRegister());
             continue;
         }
@@ -774,21 +779,22 @@ FrameState::uncopy(FrameEntry *original)
      *    [A, B, C, D]
      * And the tracker has:
      *    [A, D, C, B]
      *
      * If B, C, and D are copies of A - we will walk the tracker to the end
      * and select D, not B (see bug 583684).
      */
     uint32 firstCopy = InvalidIndex;
+    FrameEntry *tos = tosFe();
     FrameEntry *bestFe = NULL;
     uint32 ncopies = 0;
     for (uint32 i = 0; i < tracker.nentries; i++) {
         FrameEntry *fe = tracker[i];
-        if (fe >= sp)
+        if (fe >= tos)
             continue;
         if (fe->isCopy() && fe->copyOf() == original) {
             if (firstCopy == InvalidIndex) {
                 firstCopy = i;
                 bestFe = fe;
             } else if (fe < bestFe) {
                 bestFe = fe;
             }
@@ -807,17 +813,17 @@ FrameState::uncopy(FrameEntry *original)
     JS_ASSERT(bestFe);
 
     /* Mark all extra copies as copies of the new backing index. */
     bestFe->setCopyOf(NULL);
     if (ncopies > 1) {
         bestFe->setCopied();
         for (uint32 i = firstCopy; i < tracker.nentries; i++) {
             FrameEntry *other = tracker[i];
-            if (other >= sp || other == bestFe)
+            if (other >= tos || other == bestFe)
                 continue;
 
             /* The original must be tracked before copies. */
             JS_ASSERT(other != original);
 
             if (!other->isCopy() || other->copyOf() != original)
                 continue;
 
@@ -960,19 +966,20 @@ FrameState::storeLocal(uint32 n, bool po
          * 
          * Because of |let| expressions, it's kind of hard to really know
          * whether a region on the stack will be popped all at once. Bleh!
          *
          * This should be rare except in browser code (and maybe even then),
          * but even so there's a quick workaround. We take all copies of the
          * backing fe, and redirect them to be copies of the destination.
          */
+        FrameEntry *tos = tosFe();
         for (uint32 i = backing->trackerIndex() + 1; i < tracker.nentries; i++) {
             FrameEntry *fe = tracker[i];
-            if (fe >= sp)
+            if (fe >= tos)
                 continue;
             if (fe->isCopy() && fe->copyOf() == backing)
                 fe->setCopyOf(localFe);
         }
     }
     backing->setNotCopied();
     
     /*
--- a/js/src/methodjit/FrameState.h
+++ b/js/src/methodjit/FrameState.h
@@ -711,16 +711,17 @@ class FrameState
      */
     inline void giveOwnRegs(FrameEntry *fe);
 
     /*
      * Returns the current stack depth of the frame.
      */
     uint32 stackDepth() const { return sp - spBase; }
     uint32 frameDepth() const { return stackDepth() + script->nfixed; }
+    inline FrameEntry *tosFe() const;
 
 #ifdef DEBUG
     void assertValidRegisterState() const;
 #endif
 
     Address addressOf(const FrameEntry *fe) const;
     Address addressForDataRemat(const FrameEntry *fe) const;