Merge. Make sure we clear out all slots from the tracker when leaving a frame.
author Andreas Gal <gal@mozilla.com>
Wed, 23 Jul 2008 15:56:27 -0700
changeset 17813 c1d3d1e0e92418c474a374bcc876aa8845fdb00b
parent 17812 7f3cc9a4bf1a39d823ef4f1a3bcbc8ecafe0553e (current diff)
parent 17811 711092884c032410bd635b1d68ef9c778afa3777 (diff)
child 17814 1fa3e45f253a841ccdfee21c062b082919f236f2
push id 1452
push user shaver@mozilla.com
push date Fri, 22 Aug 2008 00:08:22 +0000
treeherder autoland@d13bb0868596
milestone 1.9.1a1pre
js/src/jstracer.cpp
js/src/jstracer.h
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -135,30 +135,26 @@ Tracker::clear()
         pagelist = pagelist->next;
         GC::Free(p);
     }
 }
 
 bool
 Tracker::has(const void *v) const
 {
-    struct Tracker::Page* p = findPage(v);
-    if (!p)
-        return false;
-    return p->map[(jsuword(v) & 0xfff) >> 2] != NULL;
+    return get(v) != NULL;
 }
 
 LIns*
 Tracker::get(const void* v) const
 {
     struct Tracker::Page* p = findPage(v);
-    JS_ASSERT(p); /* we must have a page for the slot we are looking for */
-    LIns* i = p->map[(jsuword(v) & 0xfff) >> 2];
-    JS_ASSERT(i);
-    return i;
+    if (!p)
+        return NULL;
+    return p->map[(jsuword(v) & 0xfff) >> 2];
 }
 
 void
 Tracker::set(const void* v, LIns* i)
 {
     struct Tracker::Page* p = findPage(v);
     if (!p)
         p = addPage(v);
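
The Tracker hunk above changes get() to return NULL for addresses that have no page yet (instead of asserting), and turns has() into a thin wrapper over get(). The underlying lookup is page-based: findPage() locates the 4 KB page covering the address, and the entry index is the word offset within that page, (jsuword(v) & 0xfff) >> 2. Below is a minimal sketch of that addressing scheme, using std::unordered_map in place of the real GC-allocated page list; MiniTracker and its members are illustrative names, not SpiderMonkey API.

    #include <array>
    #include <cstddef>
    #include <cstdint>
    #include <unordered_map>

    // Sketch of the Tracker's addressing scheme: one table per 4 KB page,
    // one entry per 4-byte word within the page (1024 entries per page).
    // The real Tracker keeps a GC-allocated linked list of pages instead.
    struct MiniTracker {
        std::unordered_map<std::uintptr_t, std::array<void*, 1024>> pages;

        static std::uintptr_t pageKey(const void* v) {
            return reinterpret_cast<std::uintptr_t>(v) & ~std::uintptr_t(0xfff); // page base
        }
        static std::size_t slotIndex(const void* v) {
            return (reinterpret_cast<std::uintptr_t>(v) & 0xfff) >> 2;           // word offset
        }

        // Like the patched Tracker::get(): untracked addresses simply yield NULL.
        void* get(const void* v) const {
            auto it = pages.find(pageKey(v));
            return it == pages.end() ? nullptr : it->second[slotIndex(v)];
        }

        // has() is now just a wrapper around get().
        bool has(const void* v) const { return get(v) != nullptr; }

        // Like Tracker::set(): the page table is created lazily on first use
        // (operator[] value-initializes the array, so fresh entries are NULL).
        void set(const void* v, void* ins) {
            pages[pageKey(v)][slotIndex(v)] = ins;
        }
    };
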
@@ -582,17 +578,17 @@ static unsigned nativeFrameSlots(unsigne
             return slots;
         fp = fp->down;
     }
     JS_NOT_REACHED("nativeFrameSlots");
 }
 
 /* Determine the offset in the native frame (marshal) for an address
    that is part of a currently active frame. */
-size_t
+ptrdiff_t
 TraceRecorder::nativeFrameOffset(jsval* p) const
 {
     JSStackFrame* currentFrame = cx->fp;
     size_t offset = 0;
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, treeInfo->ngslots, treeInfo->gslots, callDepth,
         if (vp == p) return offset;
         offset += sizeof(double)
     );
@@ -832,28 +828,37 @@ TraceRecorder::set(jsval* p, LIns* i, bo
     /* Sink all type casts targeting the stack into the side exit by simply storing the original
        (uncasted) value. Each guard generates the side exit map based on the types of the
        last stores to every stack location, so it's safe to not perform them on-trace. */
     if (isPromoteInt(i))
         i = ::demote(lir, i);
     /* If we are writing to this location for the first time, calculate the offset into the
        native frame manually, otherwise just look up the last load or store associated with
        the same source address (p) and use the same offset/base. */
-    if (initializing) { 
+    LIns* x;
+    if ((x = stackTracker.get(p)) == NULL) {
         stackTracker.set(p, lir->insStorei(i, lirbuf->sp, 
                 -treeInfo->nativeStackBase + nativeFrameOffset(p) + 8));
     } else {
-        LIns* q = stackTracker.get(p);
-        if (q->isop(LIR_ld) || q->isop(LIR_ldq)) {
-            JS_ASSERT(q->oprnd1() == lirbuf->sp);
-            lir->insStorei(i, q->oprnd1(), q->oprnd2()->constval());
+        if (x->isop(LIR_ld) || x->isop(LIR_ldq)) {
+            JS_ASSERT(x->oprnd1() == lirbuf->sp);
+            JS_ASSERT(x->oprnd2()->constval() == -treeInfo->nativeStackBase +
+                    nativeFrameOffset(p) + 8);
+            lir->insStorei(i, x->oprnd1(), x->oprnd2()->constval());
+        } else if (x->isop(LIR_st) || x->isop(LIR_stq)) {
+            JS_ASSERT(x->oprnd2() == lirbuf->sp);
+            JS_ASSERT(x->oprnd3()->constval() == -treeInfo->nativeStackBase +
+                    nativeFrameOffset(p) + 8);
+            lir->insStorei(i, x->oprnd2(), x->oprnd3()->constval());
         } else {
-            JS_ASSERT(q->isop(LIR_sti) || q->isop(LIR_stqi));
-            JS_ASSERT(q->oprnd2() == lirbuf->sp);
-            lir->insStorei(i, q->oprnd2(), q->immdisp());
+            JS_ASSERT(x->isop(LIR_sti) || x->isop(LIR_stqi));
+            JS_ASSERT(x->oprnd2() == lirbuf->sp);
+            JS_ASSERT(x->immdisp() == -treeInfo->nativeStackBase +
+                    nativeFrameOffset(p) + 8);
+            lir->insStorei(i, x->oprnd2(), x->immdisp());
         }
     }
 }
 
 LIns*
 TraceRecorder::get(jsval* p)
 {
     return tracker.get(p);
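
In the set() hunk above, the explicit 'initializing' parameter is gone: the stackTracker lookup itself now decides whether this is the first write to a slot, which is exactly what the relaxed Tracker::get() makes possible, and the new asserts check that a reused load/store still carries the same displacement that nativeFrameOffset() would recompute. A hedged sketch of that lookup-driven idiom follows; it uses plain C++ containers rather than the actual LirWriter/LIns API, and all names here are made up.

    #include <cstdio>
    #include <unordered_map>

    // Hypothetical stand-in for the stack tracker: slot address -> displacement of the
    // last store emitted for that slot. (The real code stores the LIR instruction and
    // reads the displacement back out of its operands.)
    typedef std::unordered_map<const void*, int> SlotCache;

    static void emitStore(SlotCache& cache, const void* p, int value, int disp) {
        std::printf("store %d at sp%+d\n", value, disp);
        cache[p] = disp;
    }

    // Mirrors the new control flow in TraceRecorder::set(): the tracker lookup, not a
    // caller-supplied 'initializing' flag, decides whether this is the first write.
    static void setSlot(SlotCache& cache, const void* p, int value, int computedDisp) {
        auto it = cache.find(p);
        if (it == cache.end())
            emitStore(cache, p, value, computedDisp);  // first write: use the computed offset
        else
            emitStore(cache, p, value, it->second);    // later writes: reuse the recorded offset
    }
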
@@ -912,17 +917,17 @@ TraceRecorder::checkType(jsval& v, uint8
                    we can be sure it will always be an int. If we see INCVAR, we similarly
                    speculate that the result will be int, even though this is not
                    guaranteed and this might cause the entry map to mismatch and thus
                    the trace never to be entered. */
                 if (i->isop(LIR_i2f) ||
                         (i->isop(LIR_fadd) && i->oprnd2()->isconstq() &&
                                 fabs(i->oprnd2()->constvalf()) == 1.0)) {
 #ifdef DEBUG
-                    printf("demoting type of an entry slot #%ld, triggering re-compilation\n",
+                    printf("demoting type of an entry slot #%d, triggering re-compilation\n",
                             nativeFrameOffset(&v));
 #endif
                     JS_ASSERT(!TYPEMAP_GET_FLAG(t, TYPEMAP_FLAG_DEMOTE) ||
                             TYPEMAP_GET_FLAG(t, TYPEMAP_FLAG_DONT_DEMOTE));
                     TYPEMAP_SET_FLAG(t, TYPEMAP_FLAG_DEMOTE);
                     TYPEMAP_SET_TYPE(t, JSVAL_INT);
                     AUDIT(slotDemoted);
                     recompileFlag = true;
@@ -937,17 +942,17 @@ TraceRecorder::checkType(jsval& v, uint8
           If not, then demoting the slot didn't work out. Flag the slot to be not
            demoted again. */
         JS_ASSERT(TYPEMAP_GET_TYPE(t) == JSVAL_INT &&
                 TYPEMAP_GET_FLAG(t, TYPEMAP_FLAG_DEMOTE) &&
                 !TYPEMAP_GET_FLAG(t, TYPEMAP_FLAG_DONT_DEMOTE));
         if (!i->isop(LIR_i2f)) {
             AUDIT(slotPromoted);
 #ifdef DEBUG
-            printf("demoting type of a slot #%ld failed, locking it and re-compiling\n",
+            printf("demoting type of a slot #%d failed, locking it and re-compiling\n",
                     nativeFrameOffset(&v));
 #endif
             TYPEMAP_SET_FLAG(t, TYPEMAP_FLAG_DONT_DEMOTE);
             TYPEMAP_SET_TYPE(t, JSVAL_DOUBLE);
             recompileFlag = true;
             return true; /* keep going, recompileFlag will trigger error when we are done with
                             all the slots */
 
@@ -1841,17 +1846,33 @@ bool TraceRecorder::guardDenseArrayIndex
     guard(false, lir->ins_eq0(dslots_ins));
     guard(true, lir->ins2(LIR_lt, idx_ins,
                           lir->insLoadi(dslots_ins, -sizeof(jsval))));
     return true;
 }
 
 bool TraceRecorder::leaveFrame()
 {
-    return (callDepth--) > 0;
+    if (callDepth > 0) {
+        /* Clear out all slots of this frame in the stackTracker. Different locations on the
+           VM stack might map to different locations on the native stack depending on the
+           number of arguments of the next call, so we have to make sure we map those
+           into the cache with the right offsets. */
+        JSStackFrame* fp = cx->fp;
+        stackTracker.set(&fp->rval, (LIns*)0);
+        jsval* vp;
+        jsval* vpstop;
+        for (vp = &fp->argv[-1], vpstop = &fp->argv[fp->fun->nargs]; vp < vpstop; ++vp)
+            stackTracker.set(vp, (LIns*)0);
+        for (vp = &fp->slots[0], vpstop = &fp->slots[fp->script->nslots]; vp < vpstop; ++vp)
+            stackTracker.set(vp, (LIns*)0);
+        --callDepth;
+        return true;
+    }
+    return false;
 }
 
 bool TraceRecorder::record_JSOP_INTERRUPT()
 {
     return false;
 }
 bool TraceRecorder::record_JSOP_PUSH()
 {
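
The leaveFrame() hunk is the substance of the commit message: before popping a frame, every stackTracker entry belonging to it is invalidated, covering the return-value slot, argv[-1] (presumably the |this| slot) through the last formal argument, and all local slots, because those same jsval addresses can map to different native-stack offsets the next time a frame is pushed. A self-contained sketch of the clearing loop over a simplified frame layout; MiniFrame mirrors the JSStackFrame fields used here but is not the real structure.

    #include <unordered_map>

    typedef long jsval;                       // placeholder for the real jsval

    struct MiniFrame {                        // illustrative subset of JSStackFrame
        jsval    rval;                        // return-value slot
        jsval*   argv;                        // argv[-1] .. argv[nargs-1]
        unsigned nargs;
        jsval*   slots;                       // local variable slots
        unsigned nslots;
    };

    typedef std::unordered_map<jsval*, void*> StackTracker;   // address -> last LIR ins

    // Mirror of the new leaveFrame() logic: wipe every tracker entry belonging to the
    // frame being left before decrementing the call depth.
    static void clearFrameSlots(StackTracker& stackTracker, MiniFrame* fp) {
        stackTracker[&fp->rval] = nullptr;
        for (jsval* vp = &fp->argv[-1], *vpstop = &fp->argv[fp->nargs]; vp < vpstop; ++vp)
            stackTracker[vp] = nullptr;
        for (jsval* vp = &fp->slots[0], *vpstop = &fp->slots[fp->nslots]; vp < vpstop; ++vp)
            stackTracker[vp] = nullptr;
    }
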
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -38,16 +38,17 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #ifndef jstracer_h___
 #define jstracer_h___
 
 #include "jsstddef.h"
+#include "jstypes.h"
 #include "jslock.h"
 #include "jsnum.h"
 #include "jsinterp.h"
 
 #include "nanojit/nanojit.h"
 
 /*
  * We use a magic boxed pointer value to represent error conditions that
@@ -99,17 +100,17 @@ public:
     virtual ~TreeInfo() {
         if (typeMap) free(typeMap);
         if (gslots) free(gslots);
     }
     
     struct JSFrameRegs      entryRegs;
     unsigned                entryNativeFrameSlots;
     unsigned                maxNativeFrameSlots;
-    size_t                  nativeStackBase;
+    ptrdiff_t               nativeStackBase;
     unsigned                maxCallDepth;
     uint32                  globalShape;
     unsigned                ngslots;
     uint8                  *typeMap;
     uint16                 *gslots;
 };
 
 extern struct nanojit::CallInfo builtins[];
@@ -142,17 +143,17 @@ class TraceRecorder {
     nanojit::LirWriter*     verbose_filter;
     nanojit::LirWriter*     cse_filter;
     nanojit::LirWriter*     expr_filter;
     nanojit::LirWriter*     func_filter;
     nanojit::LIns*          cx_ins;
     nanojit::SideExit       exit;
     bool                    recompileFlag;
 
-    size_t nativeFrameOffset(jsval* p) const;
+    ptrdiff_t nativeFrameOffset(jsval* p) const;
     void import(nanojit::LIns* base, unsigned slot, jsval* p, uint8& t, 
             const char *prefix, int index, jsuword* localNames);
     void trackNativeFrameUse(unsigned slots);
 
     unsigned getCallDepth() const;
     nanojit::LIns* guard(bool expected, nanojit::LIns* cond);
     nanojit::LIns* addName(nanojit::LIns* ins, const char* name);
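
The jstracer.h side of the patch switches nativeFrameOffset() and TreeInfo::nativeStackBase from size_t to ptrdiff_t (and pulls in jstypes.h). The likely reason, inferred from the store displacement -treeInfo->nativeStackBase + nativeFrameOffset(p) + 8 used in set(), is that the displacement can go negative, which an unsigned size_t would silently wrap. A tiny illustration of the difference, with made-up values:

    #include <cstddef>
    #include <cstdio>

    int main() {
        std::size_t    baseU = 64, offU = 16;   // unsigned, as before the patch
        std::ptrdiff_t baseS = 64, offS = 16;   // signed, as after the patch

        // Unsigned arithmetic wraps: 16 - 64 + 8 becomes a huge positive number.
        std::size_t    wrapped = offU - baseU + 8;
        // Signed arithmetic keeps the intended negative displacement: -40.
        std::ptrdiff_t disp    = offS - baseS + 8;

        std::printf("unsigned: %zu   signed: %td\n", wrapped, disp);
        return 0;
    }
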