Bug 473117 - Rejigger how guards use side exits, and fix an erroneous comment. r=graydon
author Jeff Walden <jwalden@mit.edu>
date Sat, 18 Apr 2009 16:47:23 -0700
changeset 27540 e8588a4a11539dd6a10a8f4467839d22ec396dfc
parent 27539 1b3a286e71a1cf299b0dc0fa5a1e249ba9b896af
child 27541 c59fd9494490252185b4b61e024e6376cedf29cb
push id 6604
push user rsayre@mozilla.com
push date Mon, 20 Apr 2009 18:44:02 +0000
treeherder mozilla-central@83068fe4a1ec
reviewers graydon
bugs 473117
milestone 1.9.2a1pre
js/src/jsobj.h
js/src/jstracer.cpp
js/src/jstracer.h
js/src/nanojit/Assembler.cpp
js/src/nanojit/LIR.cpp
js/src/nanojit/Native.h
js/src/trace-test.js
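
A note on the rework before the diff: previously TraceRecorder::snapshot() allocated a GuardRecord and a VMSideExit together in a single LIR skip payload and returned an LIns*, so reusing an exit for another guard required clone(). After this patch snapshot() returns the VMSideExit* itself, and each emitted guard wraps it in a fresh GuardRecord via createGuardRecord(). The following standalone sketch models that ownership split; it uses toy allocation and hypothetical makeExit/makeGuardRecord helpers standing in for snapshot/createGuardRecord, not SpiderMonkey code:

    #include <cassert>
    #include <cstdio>

    struct VMSideExit;

    struct GuardRecord {
        VMSideExit* exit;
        GuardRecord* next;
    };

    struct VMSideExit {
        GuardRecord* guards;   // head of the intrusive list of guards using this exit

        void addGuard(GuardRecord* gr) {
            assert(gr->next == nullptr);   // mirrors the new NanoAsserts in Native.h
            assert(guards != gr);
            gr->next = guards;
            guards = gr;
        }
    };

    // Stand-in for the new snapshot(): hand back the exit itself, not an LIns*.
    VMSideExit* makeExit() {
        return new VMSideExit{nullptr};
    }

    // Stand-in for createGuardRecord(): one fresh record per emitted guard.
    GuardRecord* makeGuardRecord(VMSideExit* exit) {
        GuardRecord* gr = new GuardRecord{exit, nullptr};
        exit->addGuard(gr);
        return gr;
    }

    int main() {
        VMSideExit* exit = makeExit();
        GuardRecord* g1 = makeGuardRecord(exit);   // two guards share one exit
        GuardRecord* g2 = makeGuardRecord(exit);   // without any clone() step
        assert(g1->exit == g2->exit);
        assert(g2->next == g1 && g1->next == nullptr);
        std::printf("exit %p carries 2 guard records\n", (void*) exit);
        return 0;
    }
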
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -235,17 +235,24 @@ struct JSObject {
 #define STOBJ_CLEAR_PARENT(obj)                                               \
     ((obj)->fslots[JSSLOT_PARENT] = JSVAL_NULL)
 
 /*
  * We use JSObject.classword to store both JSClass* and the delegate and system
  * flags in the two least significant bits. We do *not* synchronize updates of
  * obj->classword -- API clients must take care.
  */
-#define STOBJ_GET_CLASS(obj)    ((JSClass *)((obj)->classword & ~3))
+#define JSSLOT_CLASS_MASK_BITS 3
+
+JS_ALWAYS_INLINE JSClass*
+STOBJ_GET_CLASS(const JSObject* obj)
+{
+    return (JSClass *) (obj->classword & ~JSSLOT_CLASS_MASK_BITS);
+}
+
 #define STOBJ_IS_DELEGATE(obj)  (((obj)->classword & 1) != 0)
 #define STOBJ_SET_DELEGATE(obj) ((obj)->classword |= 1)
 #define STOBJ_NULLSAFE_SET_DELEGATE(obj)                                      \
     (!(obj) || STOBJ_SET_DELEGATE((JSObject*)obj))
 #define STOBJ_IS_SYSTEM(obj)    (((obj)->classword & 2) != 0)
 #define STOBJ_SET_SYSTEM(obj)   ((obj)->classword |= 2)
 
 #define STOBJ_GET_PRIVATE(obj)                                                \
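
Context for the jsobj.h hunk above: a JSClass* is at least 4-byte aligned, so the two low bits of classword are free to carry the delegate (bit 0) and system (bit 1) flags, and masking with ~JSSLOT_CLASS_MASK_BITS recovers the pointer. A minimal sketch of the same bit-packing trick with toy types; the alignment assumption is checked by the first assert:

    #include <cassert>
    #include <cstdint>

    struct ToyClass { const char* name; };

    static const uintptr_t MASK_BITS = 3;   // plays the role of JSSLOT_CLASS_MASK_BITS

    int main() {
        static ToyClass objectClass = { "Object" };

        // Pointer-aligned storage leaves the two low bits clear...
        uintptr_t word = reinterpret_cast<uintptr_t>(&objectClass);
        assert((word & MASK_BITS) == 0);

        // ...so the flags can ride along in them, as in JSObject::classword.
        word |= 1;   // delegate flag (STOBJ_SET_DELEGATE)
        word |= 2;   // system flag (STOBJ_SET_SYSTEM)

        // Masking the flags off recovers the class pointer (STOBJ_GET_CLASS).
        ToyClass* clasp = reinterpret_cast<ToyClass*>(word & ~MASK_BITS);
        assert(clasp == &objectClass);
        assert((word & 1) != 0 && (word & 2) != 0);
        return 0;
    }
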
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -2098,17 +2098,17 @@ TraceRecorder::determineSlotType(jsval* 
         ? (isPromoteInt(i) ? JSVAL_INT : JSVAL_DOUBLE)
         : JSVAL_IS_NULL(*vp)
         ? JSVAL_TNULL
         : JSVAL_TAG(*vp);
     JS_ASSERT((m != JSVAL_INT) || isInt32(*vp));
     return m;
 }
 
-JS_REQUIRES_STACK LIns*
+JS_REQUIRES_STACK VMSideExit*
 TraceRecorder::snapshot(ExitType exitType)
 {
     JSStackFrame* fp = cx->fp;
     JSFrameRegs* regs = fp->regs;
     jsbytecode* pc = regs->pc;
 
     /* Check for a return-value opcode that needs to restart at the next instruction. */
     const JSCodeSpec& cs = js_CodeSpec[*pc];
@@ -2164,152 +2164,138 @@ TraceRecorder::snapshot(ExitType exitTyp
            trees returning on a break goto, which the outer recorder then would confuse with
            a break in the outer tree. */
         if (*pc == JSOP_GOTO)
             pc += GET_JUMP_OFFSET(pc);
         else if (*pc == JSOP_GOTOX)
             pc += GET_JUMPX_OFFSET(pc);
     }
 
-    JS_STATIC_ASSERT (sizeof(GuardRecord) + sizeof(VMSideExit) < MAX_SKIP_BYTES);
-
     /*
-     * Check if we already have a matching side exit. If so use that side exit structure
-     * by cloning it, otherwise we have to create our own.
+     * Check if we already have a matching side exit; if so we can return that
+     * side exit instead of creating a new one.
      */
     VMSideExit** exits = treeInfo->sideExits.data();
     unsigned nexits = treeInfo->sideExits.length();
     if (exitType == LOOP_EXIT) {
         for (unsigned n = 0; n < nexits; ++n) {
             VMSideExit* e = exits[n];
             if (e->pc == pc && e->imacpc == fp->imacpc &&
                 !memcmp(getFullTypeMap(exits[n]), typemap, typemap_size)) {
                 AUDIT(mergedLoopExits);
-                return clone(exits[n]);
+                return e;
             }
         }
     }
 
-    if (sizeof(GuardRecord) +
-        sizeof(VMSideExit) +
-        (stackSlots + ngslots) * sizeof(uint8) >= MAX_SKIP_BYTES) {
+    if (sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(uint8) >= MAX_SKIP_BYTES) {
         /*
          * ::snapshot() is infallible in the sense that callers don't
          * expect errors; but this is a trace-aborting error condition. So
          * mangle the request to consume zero slots, and mark the tree as
          * to-be-trashed. This should be safe as the trace will be aborted
          * before assembly or execution due to the call to
          * trackNativeStackUse above.
          */
         stackSlots = 0;
         ngslots = 0;
         trashSelf = true;
     }
 
-    /* We couldn't find a matching side exit, so create our own side exit structure. */
-    LIns* data = lir->skip(sizeof(GuardRecord) +
-                           sizeof(VMSideExit) +
-                           (stackSlots + ngslots) * sizeof(uint8));
-    GuardRecord* rec = (GuardRecord*)data->payload();
-    VMSideExit* exit = (VMSideExit*)(rec + 1);
-
-    /* Setup guard record structure. */
-    memset(rec, 0, sizeof(GuardRecord));
-    rec->exit = exit;
+    /* We couldn't find a matching side exit, so create a new one. */
+    LIns* data = lir->skip(sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(uint8));
+    VMSideExit* exit = (VMSideExit*) data->payload();
 
     /* Setup side exit structure. */
     memset(exit, 0, sizeof(VMSideExit));
     exit->from = fragment;
     exit->calldepth = callDepth;
     exit->numGlobalSlots = ngslots;
     exit->numStackSlots = stackSlots;
     exit->numStackSlotsBelowCurrentFrame = cx->fp->callee
         ? nativeStackOffset(&cx->fp->argv[-2])/sizeof(double)
         : 0;
     exit->exitType = exitType;
-    exit->addGuard(rec);
     exit->block = fp->blockChain;
     exit->pc = pc;
     exit->imacpc = fp->imacpc;
     exit->sp_adj = (stackSlots * sizeof(double)) - treeInfo->nativeStackBase;
     exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
     memcpy(getFullTypeMap(exit), typemap, typemap_size);
-
-    /* BIG FAT WARNING: If compilation fails, we currently don't reset the lirbuf so its safe
-       to keep references to the side exits here. If we ever start rewinding those lirbufs,
-       we have to make sure we purge the side exits that then no longer will be in valid
-       memory. */
-    if (exitType == LOOP_EXIT)
-        treeInfo->sideExits.add(exit);
-    return data;
-}
-
-JS_REQUIRES_STACK LIns*
-TraceRecorder::clone(VMSideExit* exit)
-{
-    LIns* data = lir->skip(sizeof(GuardRecord));
-    GuardRecord* rec = (GuardRecord*)data->payload();
-    /* setup guard record structure with shared side exit */
-    memset(rec, 0, sizeof(GuardRecord));
-    rec->exit = exit;
-    exit->addGuard(rec);
-    return data;
+    return exit;
 }
 
 JS_REQUIRES_STACK LIns*
+TraceRecorder::createGuardRecord(VMSideExit* exit)
+{
+    LIns* guardRec = lir->skip(sizeof(GuardRecord));
+    GuardRecord* gr = (GuardRecord*) guardRec->payload();
+
+    memset(gr, 0, sizeof(GuardRecord));
+    gr->exit = exit;
+    exit->addGuard(gr);
+
+    return guardRec;
+}
+
+/*
+ * Emit a guard for condition (cond), expecting to evaluate to boolean result
+ * (expected) and using the supplied side exit if the condition doesn't hold.
+ */
+JS_REQUIRES_STACK void
+TraceRecorder::guard(bool expected, LIns* cond, VMSideExit* exit)
+{
+    LIns* guardRec = createGuardRecord(exit);
+
+    /*
+     * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
+     * safe to keep references to the side exits here. If we ever start
+     * rewinding those lirbufs, we have to make sure we purge the side exits
+     * that then no longer will be in valid memory.
+     */
+    if (exit->exitType == LOOP_EXIT)
+        treeInfo->sideExits.add(exit);
+
+    if (!cond->isCond()) {
+        expected = !expected;
+        cond = lir->ins_eq0(cond);
+    }
+
+    LIns* guardIns =
+        lir->insGuard(expected ? LIR_xf : LIR_xt, cond, guardRec);
+    if (guardIns) {
+        debug_only_v(printf("    SideExit=%p exitType=%d\n", (void*)exit, exit->exitType);)
+    } else {
+        debug_only_v(printf("    redundant guard, eliminated\n");)
+    }
+}
+
+JS_REQUIRES_STACK VMSideExit*
 TraceRecorder::copy(VMSideExit* copy)
 {
-    unsigned typemap_size = copy->numGlobalSlots + copy->numStackSlots;
-    LIns* data = lir->skip(sizeof(GuardRecord) +
-                           sizeof(VMSideExit) +
+    size_t typemap_size = copy->numGlobalSlots + copy->numStackSlots;
+    LIns* data = lir->skip(sizeof(VMSideExit) +
                            typemap_size * sizeof(uint8));
-    GuardRecord* rec = (GuardRecord*)data->payload();
-    VMSideExit* exit = (VMSideExit*)(rec + 1);
-
-    /* Setup guard record structure. */
-    memset(rec, 0, sizeof(GuardRecord));
-    rec->exit = exit;
+    VMSideExit* exit = (VMSideExit*) data->payload();
 
     /* Copy side exit structure. */
     memcpy(exit, copy, sizeof(VMSideExit) + typemap_size * sizeof(uint8));
-    exit->guards = rec;
+    exit->guards = NULL;
     exit->from = fragment;
     exit->target = NULL;
 
-    /* BIG FAT WARNING: If compilation fails, we currently don't reset the lirbuf so its safe
-       to keep references to the side exits here. If we ever start rewinding those lirbufs,
-       we have to make sure we purge the side exits that then no longer will be in valid
-       memory. */
+    /*
+     * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
+     * safe to keep references to the side exits here. If we ever start
+     * rewinding those lirbufs, we have to make sure we purge the side exits
+     * that then no longer will be in valid memory.
+     */
     if (exit->exitType == LOOP_EXIT)
         treeInfo->sideExits.add(exit);
-    return data;
-}
-
-/* Emit a guard for condition (cond), expecting to evaluate to boolean result (expected)
-   and using the supplied side exit if the conditon doesn't hold. */
-JS_REQUIRES_STACK void
-TraceRecorder::guard(bool expected, LIns* cond, LIns* exit)
-{
-    if (!cond->isCond()) {
-        expected = !expected;
-        cond = lir->ins_eq0(cond);
-    }
-#ifdef DEBUG
-    LIns* guard =
-#endif
-    lir->insGuard(expected ? LIR_xf : LIR_xt, cond, exit);
-#ifdef DEBUG
-    if (guard) {
-        GuardRecord* lr = guard->record();
-        VMSideExit* e = (VMSideExit*)lr->exit;
-        debug_only_v(printf("    lr=%p exitType=%d\n", (void*)e, e->exitType);)
-    } else {
-        debug_only_v(printf("    redundant guard, eliminated\n");)
-    }
-#endif
+    return exit;
 }
 
 /* Emit a guard for condition (cond), expecting to evaluate to boolean result (expected)
    and generate a side exit with type exitType to jump to if the condition does not hold. */
 JS_REQUIRES_STACK void
 TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType)
 {
     guard(expected, cond, snapshot(exitType));
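
One subtlety carried over into the rewritten guard(): if the guarded value is not already a condition, the recorder guards on ins_eq0(value) and flips the expected sense, which leaves the set of executions that stay on trace unchanged. Below is a toy model of that normalization over plain bools rather than LIR, checking the invariant exhaustively:

    #include <cassert>

    // 'isCond' says whether the value is already a condition; if not,
    // guard() rewrites the guard as eq0(value) with the sense flipped.
    static bool guardHolds(bool expected, bool value, bool isCond) {
        if (!isCond) {
            expected = !expected;     // expected = !expected;
            value = !value;           // cond = lir->ins_eq0(cond);
        }
        return value == expected;     // the guard passes when they agree
    }

    int main() {
        // Normalization must not change which executions stay on trace.
        for (int e = 0; e < 2; ++e)
            for (int v = 0; v < 2; ++v)
                assert(guardHolds(e != 0, v != 0, true) ==
                       guardHolds(e != 0, v != 0, false));
        return 0;
    }
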
@@ -2679,32 +2665,28 @@ TraceRecorder::closeLoop(JSTraceMonitor*
     /*
      * We should have arrived back at the loop header, and hence we don't want to be in an imacro
      * here and the opcode should be either JSOP_LOOP, or in case this loop was blacklisted in the
      * meantime JSOP_NOP.
      */
     JS_ASSERT((*cx->fp->regs->pc == JSOP_LOOP || *cx->fp->regs->pc == JSOP_NOP) && !cx->fp->imacpc);
 
     bool stable;
-    LIns* exitIns;
     Fragment* peer;
-    VMSideExit* exit;
     VMFragment* peer_root;
     Fragmento* fragmento = tm->fragmento;
 
-    exitIns = snapshot(UNSTABLE_LOOP_EXIT);
-    exit = (VMSideExit*)((GuardRecord*)exitIns->payload())->exit;
-
     if (callDepth != 0) {
         debug_only_v(printf("Blacklisted: stack depth mismatch, possible recursion.\n");)
         js_Blacklist(fragment->root);
         trashSelf = true;
         return;
     }
 
+    VMSideExit* exit = snapshot(UNSTABLE_LOOP_EXIT);
     JS_ASSERT(exit->numStackSlots == treeInfo->nStackTypes);
 
     VMFragment* root = (VMFragment*)fragment->root;
     peer_root = getLoop(traceMonitor, root->ip, root->globalObj, root->globalShape);
     JS_ASSERT(peer_root != NULL);
 
     stable = deduceTypeStability(peer_root, &peer, demote);
 
@@ -2719,17 +2701,17 @@ TraceRecorder::closeLoop(JSTraceMonitor*
     }
 
     if (stable && demote) {
         JS_ASSERT(fragment->kind == LoopTrace);
         return;
     }
 
     if (!stable) {
-        fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), exitIns);
+        fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), createGuardRecord(exit));
 
         /*
          * If we didn't find a type stable peer, we compile the loop anyway and
          * hope it becomes stable later.
          */
         if (!peer) {
             /*
              * If such a fragment does not exist, let's compile the loop ahead
@@ -2748,17 +2730,17 @@ TraceRecorder::closeLoop(JSTraceMonitor*
             exit->target = peer;
             debug_only_v(printf("Joining type-unstable trace to target fragment %p.\n", (void*)peer);)
             stable = true;
             ((TreeInfo*)peer->vmprivate)->dependentTrees.addUnique(fragment->root);
             treeInfo->linkedTrees.addUnique(peer);
         }
     } else {
         exit->target = fragment->root;
-        fragment->lastIns = lir->insGuard(LIR_loop, lir->insImm(1), exitIns);
+        fragment->lastIns = lir->insGuard(LIR_loop, lir->insImm(1), createGuardRecord(exit));
     }
     compile(tm);
 
     if (fragmento->assm()->error() != nanojit::None)
         return;
 
     joinEdgesToEntry(fragmento, peer_root);
 
@@ -2858,26 +2840,25 @@ TraceRecorder::joinEdgesToEntry(Fragment
     debug_only_v(js_DumpPeerStability(traceMonitor, peer_root->ip,
                                       peer_root->globalObj, peer_root->globalShape);)
 }
 
 /* Emit an always-exit guard and compile the tree (used for break statements). */
 JS_REQUIRES_STACK void
 TraceRecorder::endLoop(JSTraceMonitor* tm)
 {
-    LIns* exitIns = snapshot(LOOP_EXIT);
-
     if (callDepth != 0) {
         debug_only_v(printf("Blacklisted: stack depth mismatch, possible recursion.\n");)
         js_Blacklist(fragment->root);
         trashSelf = true;
         return;
     }
 
-    fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), exitIns);
+    fragment->lastIns =
+        lir->insGuard(LIR_x, lir->insImm(1), createGuardRecord(snapshot(LOOP_EXIT)));
     compile(tm);
 
     if (tm->fragmento->assm()->error() != nanojit::None)
         return;
 
     VMFragment* root = (VMFragment*)fragment->root;
     joinEdgesToEntry(tm->fragmento, getLoop(tm, root->ip, root->globalObj, root->globalShape));
 
@@ -5149,18 +5130,20 @@ TraceRecorder::alu(LOpcode v, jsdouble v
                 return lir->ins1(LIR_i2f, lir->insImm(jsint(r)));
             /*
              * Speculatively generate code that will perform the addition over
              * the integer inputs as an integer addition/subtraction and exit
              * if that fails.
              */
             v = (LOpcode)((int)v & ~LIR64);
             LIns* result = lir->ins2(v, d0, d1);
-            if (!result->isconst() && (!overflowSafe(d0) || !overflowSafe(d1)))
-                lir->insGuard(LIR_xt, lir->ins1(LIR_ov, result), snapshot(OVERFLOW_EXIT));
+            if (!result->isconst() && (!overflowSafe(d0) || !overflowSafe(d1))) {
+                VMSideExit* exit = snapshot(OVERFLOW_EXIT);
+                lir->insGuard(LIR_xt, lir->ins1(LIR_ov, result), createGuardRecord(exit));
+            }
             return lir->ins1(LIR_i2f, result);
         }
         /*
          * The result doesn't fit into the integer domain, so either generate
          * a floating point constant or a floating point operation.
          */
         if (s0->isconst() && s1->isconst())
             return lir->insImmf(r);
@@ -5323,21 +5306,21 @@ TraceRecorder::tableswitch()
     /* Generate switch LIR. */
     LIns* si_ins = lir_buf_writer->skip(sizeof(SwitchInfo));
     SwitchInfo* si = (SwitchInfo*) si_ins->payload();
     si->count = high + 1 - low;
     si->table = 0;
     si->index = (uint32) -1;
     LIns* diff = lir->ins2(LIR_sub, v_ins, lir->insImm(low));
     LIns* cmp = lir->ins2(LIR_ult, diff, lir->insImm(si->count));
-    lir->insGuard(LIR_xf, cmp, snapshot(DEFAULT_EXIT));
+    lir->insGuard(LIR_xf, cmp, createGuardRecord(snapshot(DEFAULT_EXIT)));
     lir->insStore(diff, lir->insImmPtr(&si->index), lir->insImm(0));
-    LIns* exit = snapshot(CASE_EXIT);
-    ((GuardRecord*) exit->payload())->exit->switchInfo = si;
-    return lir->insGuard(LIR_xtbl, diff, exit);
+    VMSideExit* exit = snapshot(CASE_EXIT);
+    exit->switchInfo = si;
+    return lir->insGuard(LIR_xtbl, diff, createGuardRecord(exit));
 }
 #endif
 
 JS_REQUIRES_STACK bool
 TraceRecorder::switchop()
 {
     jsval& v = stackval(-1);
     LIns* v_ins = get(&v);
@@ -6186,17 +6169,17 @@ TraceRecorder::box_jsval(jsval v, LIns*&
       default:
         JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING);
         v_ins = lir->ins2(LIR_pior, v_ins, INS_CONST(JSVAL_STRING));
         return;
     }
 }
 
 JS_REQUIRES_STACK void
-TraceRecorder::unbox_jsval(jsval v, LIns*& v_ins, LIns* exit)
+TraceRecorder::unbox_jsval(jsval v, LIns*& v_ins, VMSideExit* exit)
 {
     if (isNumber(v)) {
         // JSVAL_IS_NUMBER(v)
         guard(false,
               lir->ins_eq0(lir->ins2(LIR_pior,
                                      lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_INT)),
                                      lir->ins2i(LIR_eq,
                                                 lir->ins2(LIR_piand, v_ins,
@@ -6292,22 +6275,22 @@ TraceRecorder::getThis(LIns*& this_ins)
         LIns* ops_ins = lir->insLoad(LIR_ldp, map_ins, (int)offsetof(JSObjectMap, ops));
         LIns* op_ins = lir->insLoad(LIR_ldp, ops_ins, (int)offsetof(JSObjectOps, thisObject));
         guard(true, lir->ins_eq0(op_ins), MISMATCH_EXIT);
     }
     return true;
 }
 
 JS_REQUIRES_STACK bool
-TraceRecorder::guardClass(JSObject* obj, LIns* obj_ins, JSClass* clasp, LIns* exit)
+TraceRecorder::guardClass(JSObject* obj, LIns* obj_ins, JSClass* clasp, VMSideExit* exit)
 {
     bool cond = STOBJ_GET_CLASS(obj) == clasp;
 
     LIns* class_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, classword));
-    class_ins = lir->ins2(LIR_piand, class_ins, lir->insImm(~3));
+    class_ins = lir->ins2(LIR_piand, class_ins, lir->insImm(~JSSLOT_CLASS_MASK_BITS));
 
     char namebuf[32];
     JS_snprintf(namebuf, sizeof namebuf, "guard(class is %s)", clasp->name);
     guard(cond, addName(lir->ins2(LIR_eq, class_ins, INS_CONSTPTR(clasp)), namebuf), exit);
     return cond;
 }
 
 JS_REQUIRES_STACK bool
@@ -6319,17 +6302,17 @@ TraceRecorder::guardDenseArray(JSObject*
 JS_REQUIRES_STACK bool
 TraceRecorder::guardDenseArrayIndex(JSObject* obj, jsint idx, LIns* obj_ins,
                                     LIns* dslots_ins, LIns* idx_ins, ExitType exitType)
 {
     jsuint capacity = js_DenseArrayCapacity(obj);
 
     bool cond = (jsuint(idx) < jsuint(obj->fslots[JSSLOT_ARRAY_LENGTH]) && jsuint(idx) < capacity);
     if (cond) {
-        LIns* exit = snapshot(exitType);
+        VMSideExit* exit = snapshot(exitType);
         /* Guard array length */
         guard(true,
               lir->ins2(LIR_ult, idx_ins, stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
               exit);
         /* dslots must not be NULL */
         guard(false,
               lir->ins_eq0(dslots_ins),
               exit);
@@ -6353,17 +6336,17 @@ TraceRecorder::guardDenseArrayIndex(JSOb
         /* If not idx < capacity, stay on trace (and read value as undefined). */
         LIns* br3 = lir->insBranch(LIR_jf,
                                    lir->ins2(LIR_ult,
                                              idx_ins,
                                              lir->insLoad(LIR_ldp,
                                                           dslots_ins,
                                                           -(int)sizeof(jsval))),
                                    NULL);
-        lir->insGuard(LIR_x, lir->insImm(1), snapshot(exitType));
+        lir->insGuard(LIR_x, lir->insImm(1), createGuardRecord(snapshot(exitType)));
         LIns* label = lir->ins0(LIR_label);
         br1->target(label);
         br2->target(label);
         br3->target(label);
     }
     return cond;
 }
 
@@ -6797,19 +6780,21 @@ TraceRecorder::record_JSOP_NEG()
            that's zero.
          */
         if (isPromoteInt(a) &&
             (!JSVAL_IS_INT(v) || JSVAL_TO_INT(v) != 0) &&
             (!JSVAL_IS_DOUBLE(v) || !JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v))) &&
             -asNumber(v) == (int)-asNumber(v)) {
             a = lir->ins1(LIR_neg, ::demote(lir, a));
             if (!a->isconst()) {
-                LIns* exit = snapshot(OVERFLOW_EXIT);
-                lir->insGuard(LIR_xt, lir->ins1(LIR_ov, a), exit);
-                lir->insGuard(LIR_xt, lir->ins2(LIR_eq, a, lir->insImm(0)), exit);
+                VMSideExit* exit = snapshot(OVERFLOW_EXIT);
+                lir->insGuard(LIR_xt, lir->ins1(LIR_ov, a),
+                              createGuardRecord(exit));
+                lir->insGuard(LIR_xt, lir->ins2(LIR_eq, a, lir->insImm(0)),
+                              createGuardRecord(exit));
             }
             a = lir->ins1(LIR_i2f, a);
         } else {
             a = lir->ins1(LIR_fneg, a);
         }
 
         set(&v, a);
         return true;
@@ -6977,23 +6962,22 @@ TraceRecorder::emitNativeCall(JSTraceabl
     bool constructing = known->flags & JSTN_CONSTRUCTOR;
 
     if (JSTN_ERRTYPE(known) == FAIL_STATUS) {
         // This needs to capture the pre-call state of the stack. So do not set
         // pendingTraceableNative before taking this snapshot.
         JS_ASSERT(!pendingTraceableNative);
 
         // Take snapshot for deep LeaveTree and store it in cx->bailExit.
-        LIns* rec_ins = snapshot(DEEP_BAIL_EXIT);
-        GuardRecord* rec = (GuardRecord *) rec_ins->payload();
-        JS_ASSERT(rec->exit);
-        lir->insStorei(INS_CONSTPTR(rec->exit), cx_ins, offsetof(JSContext, bailExit));
+        VMSideExit* exit = snapshot(DEEP_BAIL_EXIT);
+        lir->insStorei(INS_CONSTPTR(exit), cx_ins, offsetof(JSContext, bailExit));
 
         // Tell nanojit not to discard or defer stack writes before this call.
-        lir->insGuard(LIR_xbarrier, rec_ins, rec_ins);
+        LIns* guardRec = createGuardRecord(exit);
+        lir->insGuard(LIR_xbarrier, guardRec, guardRec);
     }
 
     LIns* res_ins = lir->insCall(known->builtin, args);
     if (!constructing)
         rval_ins = res_ins;
     switch (JSTN_ERRTYPE(known)) {
       case FAIL_NULL:
         guard(false, lir->ins_eq0(res_ins), OOM_EXIT);
@@ -7888,37 +7872,40 @@ TraceRecorder::record_JSOP_CALLDSLOT()
     return false;
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::guardCallee(jsval& callee)
 {
     JS_ASSERT(VALUE_IS_FUNCTION(cx, callee));
 
-    LIns* exit = snapshot(BRANCH_EXIT);
+    VMSideExit* branchExit = snapshot(BRANCH_EXIT);
     JSObject* callee_obj = JSVAL_TO_OBJECT(callee);
     LIns* callee_ins = get(&callee);
 
-    /*
-     * NB: The following guard guards at runtime that the callee is a
-     * function. Even if the given value is an object that doesn't have
-     * a private slot, the value we're matching against is not forgeable.
-     */
+    guard(true,
+          lir->ins2(LIR_eq,
+                    lir->ins2(LIR_piand,
+                              lir->insLoad(LIR_ldp, callee_ins,
+                                           offsetof(JSObject, classword)),
+                              INS_CONSTPTR((void*)(~JSSLOT_CLASS_MASK_BITS))),
+                    INS_CONSTPTR(&js_FunctionClass)),
+          snapshot(MISMATCH_EXIT));
     guard(true,
           lir->ins2(LIR_eq,
                     lir->ins2(LIR_piand,
                               stobj_get_fslot(callee_ins, JSSLOT_PRIVATE),
                               INS_CONSTPTR((void*)(~JSVAL_INT))),
                     INS_CONSTPTR(OBJ_GET_PRIVATE(cx, callee_obj))),
-          exit);
+          branchExit);
     guard(true,
           lir->ins2(LIR_eq,
                     stobj_get_fslot(callee_ins, JSSLOT_PARENT),
                     INS_CONSTPTR(OBJ_GET_PARENT(cx, callee_obj))),
-          exit);
+          branchExit);
     return true;
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, bool constructing)
 {
     if (JS_GetGlobalForObject(cx, JSVAL_TO_OBJECT(fval)) != globalObj)
         ABORT_TRACE("JSOP_CALL or JSOP_NEW crosses global scopes");
@@ -8238,17 +8225,17 @@ TraceRecorder::prop(JSObject* obj, LIns*
 
     /* Check for non-existent property reference, which results in undefined. */
     const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
     if (PCVAL_IS_NULL(pcval)) {
         /*
          * This trace will be valid as long as neither the object nor any object
          * on its prototype chain change shape.
          */
-        LIns* exit = snapshot(BRANCH_EXIT);
+        VMSideExit* exit = snapshot(BRANCH_EXIT);
         for (;;) {
             LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
             LIns* ops_ins;
             if (map_is_native(obj->map, map_ins, ops_ins)) {
                 LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
                                           "shape");
                 guard(true,
                       addName(lir->ins2i(LIR_eq, shape_ins, OBJ_SHAPE(obj)), "guard(shape)"),
@@ -8352,17 +8339,17 @@ TraceRecorder::elem(jsval& oval, jsval& 
     if (!guardDenseArrayIndex(obj, i, obj_ins, dslots_ins, idx_ins, BRANCH_EXIT)) {
         /*
          * If we read a hole, make sure at recording time and at runtime that nothing along
          * the prototype has numeric properties.
          */
         if (js_PrototypeHasIndexedProperties(cx, obj))
             return false;
 
-        LIns* exit = snapshot(BRANCH_EXIT);
+        VMSideExit* exit = snapshot(BRANCH_EXIT);
         while ((obj = JSVAL_TO_OBJECT(obj->fslots[JSSLOT_PROTO])) != NULL) {
             obj_ins = stobj_get_fslot(obj_ins, JSSLOT_PROTO);
             LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
             LIns* ops_ins;
             if (!map_is_native(obj->map, map_ins, ops_ins))
                 ABORT_TRACE("non-native object involved along prototype chain");
 
             LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -435,17 +435,17 @@ class TraceRecorder : public avmplus::GC
     JS_REQUIRES_STACK void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned stackSlots,
                                   unsigned callDepth, unsigned ngslots, uint8* typeMap);
     void trackNativeStackUse(unsigned slots);
 
     JS_REQUIRES_STACK bool isValidSlot(JSScope* scope, JSScopeProperty* sprop);
     JS_REQUIRES_STACK bool lazilyImportGlobalSlot(unsigned slot);
 
     JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, ExitType exitType);
-    JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, nanojit::LIns* exit);
+    JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, VMSideExit* exit);
 
     nanojit::LIns* addName(nanojit::LIns* ins, const char* name);
 
     nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset);
     JS_REQUIRES_STACK void set(jsval* p, nanojit::LIns* l, bool initializing = false);
     JS_REQUIRES_STACK nanojit::LIns* get(jsval* p);
     JS_REQUIRES_STACK bool known(jsval* p);
     JS_REQUIRES_STACK void checkForGlobalObjectReallocation();
@@ -531,19 +531,19 @@ class TraceRecorder : public avmplus::GC
                                 nanojit::LIns*& v_ins);
     JS_REQUIRES_STACK bool elem(jsval& oval, jsval& idx, jsval*& vp, nanojit::LIns*& v_ins,
                                 nanojit::LIns*& addr_ins);
     JS_REQUIRES_STACK bool getProp(JSObject* obj, nanojit::LIns* obj_ins);
     JS_REQUIRES_STACK bool getProp(jsval& v);
     JS_REQUIRES_STACK bool getThis(nanojit::LIns*& this_ins);
 
     JS_REQUIRES_STACK void box_jsval(jsval v, nanojit::LIns*& v_ins);
-    JS_REQUIRES_STACK void unbox_jsval(jsval v, nanojit::LIns*& v_ins, nanojit::LIns* exit);
+    JS_REQUIRES_STACK void unbox_jsval(jsval v, nanojit::LIns*& v_ins, VMSideExit* exit);
     JS_REQUIRES_STACK bool guardClass(JSObject* obj, nanojit::LIns* obj_ins, JSClass* clasp,
-                                      nanojit::LIns* exit);
+                                      VMSideExit* exit);
     JS_REQUIRES_STACK bool guardDenseArray(JSObject* obj, nanojit::LIns* obj_ins,
                                            ExitType exitType = MISMATCH_EXIT);
     JS_REQUIRES_STACK bool guardDenseArrayIndex(JSObject* obj, jsint idx, nanojit::LIns* obj_ins,
                                                 nanojit::LIns* dslots_ins, nanojit::LIns* idx_ins,
                                                 ExitType exitType);
     JS_REQUIRES_STACK bool guardNotGlobalObject(JSObject* obj, nanojit::LIns* obj_ins);
     void clearFrameSlotsFromCache();
     JS_REQUIRES_STACK bool guardCallee(jsval& callee);
@@ -574,19 +574,37 @@ public:
     TraceRecorder(JSContext* cx, VMSideExit*, nanojit::Fragment*, TreeInfo*,
                   unsigned stackSlots, unsigned ngslots, uint8* typeMap,
                   VMSideExit* expectedInnerExit, jsbytecode* outerTree);
     ~TraceRecorder();
 
     static JS_REQUIRES_STACK JSMonitorRecordingStatus monitorRecording(JSContext* cx, TraceRecorder* tr, JSOp op);
 
     JS_REQUIRES_STACK uint8 determineSlotType(jsval* vp);
-    JS_REQUIRES_STACK nanojit::LIns* snapshot(ExitType exitType);
-    nanojit::LIns* clone(VMSideExit* exit);
-    nanojit::LIns* copy(VMSideExit* exit);
+
+    /*
+     * Examines current interpreter state to record information suitable for
+     * returning to the interpreter through a side exit of the given type.
+     */
+    JS_REQUIRES_STACK VMSideExit* snapshot(ExitType exitType);
+
+    /*
+     * Creates a separate but identical copy of the given side exit, allowing
+     * the guards associated with each to be entirely separate even after
+     * subsequent patching.
+     */
+    JS_REQUIRES_STACK VMSideExit* copy(VMSideExit* exit);
+
+    /*
+     * Creates an instruction whose payload is a GuardRecord for the given exit.
+     * The instruction is suitable for use as the final argument of a single
+     * call to LirBuffer::insGuard; do not reuse the returned value.
+     */
+    JS_REQUIRES_STACK nanojit::LIns* createGuardRecord(VMSideExit* exit);
+
     nanojit::Fragment* getFragment() const { return fragment; }
     TreeInfo* getTreeInfo() const { return treeInfo; }
     JS_REQUIRES_STACK void compile(JSTraceMonitor* tm);
     JS_REQUIRES_STACK void closeLoop(JSTraceMonitor* tm, bool& demote);
     JS_REQUIRES_STACK void endLoop(JSTraceMonitor* tm);
     JS_REQUIRES_STACK void joinEdgesToEntry(nanojit::Fragmento* fragmento,
                                             VMFragment* peer_root);
     void blacklist() { fragment->blacklist(); }
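
The doc comment on copy() above is the crux of the rework: because a VMSideExit may now be shared by many guards, a call site that needs an exit it can retarget independently must take an explicit copy with its own identity. A toy illustration, with a hypothetical copyExit helper standing in for TraceRecorder::copy():

    #include <cassert>

    struct ToyExit {
        const void* target;   // where patched guards will jump
    };

    // Stand-in for TraceRecorder::copy(): identical contents, fresh identity.
    static ToyExit copyExit(const ToyExit& e) {
        ToyExit c = e;
        return c;
    }

    int main() {
        int treeA = 0, treeB = 0;
        ToyExit original = { &treeA };
        ToyExit dup = copyExit(original);

        dup.target = &treeB;               // retarget the copy...
        assert(original.target == &treeA); // ...the original is unaffected
        return 0;
    }
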
--- a/js/src/nanojit/Assembler.cpp
+++ b/js/src/nanojit/Assembler.cpp
@@ -667,16 +667,17 @@ namespace nanojit
 		_allocator.addFree(r);
 	}
 
     void Assembler::patch(GuardRecord *lr)
     {
         Fragment *frag = lr->exit->target;
 		NanoAssert(frag->fragEntry != 0);
 		NIns* was = nPatchBranch((NIns*)lr->jmp, frag->fragEntry);
+		NanoAssert(frag->fragEntry != was);
 		verbose_only(verbose_outputf("patching jump at %p to target %p (was %p)\n",
 			lr->jmp, frag->fragEntry, was);)
 		(void)was;
     }
 
     void Assembler::patch(SideExit *exit)
     {
         GuardRecord *rec = exit->guards;
--- a/js/src/nanojit/LIR.cpp
+++ b/js/src/nanojit/LIR.cpp
@@ -175,16 +175,18 @@ namespace nanojit
 	}
 
 	LInsp LirBufWriter::insLinkTo(LOpcode op, LInsp to)
 	{
 		LInsp l = _buf->next();
 		NanoAssert(samepage(l,l+LIR_FAR_SLOTS)); // must have called ensureRoom()
         if (can24bReach(l,to))
 		{
+		    NanoStaticAssert(LIR_nearskip == LIR_skip - 1);
+		    NanoStaticAssert(LIR_neartramp == LIR_tramp - 1);
             l->initOpcode(LOpcode(op-1)); // nearskip or neartramp
             l->setimm24(to-l);
             _buf->commit(1);
 			_buf->_stats.lir++;
         }
         else
 		{
 			l = insLinkToFar(op,to);
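
The two NanoStaticAsserts added in this hunk pin down an assumption the line below them already relies on: LOpcode(op-1) only yields the near variant if LIR_nearskip and LIR_neartramp sit exactly one enum slot below LIR_skip and LIR_tramp. A standalone sketch of the same compile-time check, using a toy enum and C++11 static_assert in place of the NanoStaticAssert macro:

    // The code computes LOpcode(op-1) to get the "near" variant, which is
    // only correct if the enum keeps each pair adjacent. A static assert
    // turns a silent mis-ordering into a build failure.
    enum LOpcodeToy {
        LIR_nearskip,   // must stay exactly one below LIR_skip
        LIR_skip,
        LIR_neartramp,  // must stay exactly one below LIR_tramp
        LIR_tramp
    };

    static_assert(LIR_nearskip == LIR_skip - 1,
                  "near/far skip opcodes must be adjacent");
    static_assert(LIR_neartramp == LIR_tramp - 1,
                  "near/far tramp opcodes must be adjacent");

    int main() { return 0; }
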
--- a/js/src/nanojit/Native.h
+++ b/js/src/nanojit/Native.h
@@ -75,20 +75,22 @@ namespace nanojit {
     
     struct SideExit
     {
         GuardRecord* guards;
         Fragment* from;
         Fragment* target;
 		SwitchInfo* switchInfo;
         
-        void addGuard(GuardRecord* lr) 
+        void addGuard(GuardRecord* gr)
         {
-            lr->next = guards;
-            guards = lr;
+            NanoAssert(gr->next == NULL);
+            NanoAssert(guards != gr);
+            gr->next = guards;
+            guards = gr;
         }
     };
 }
 
 	#ifdef NJ_STACK_GROWTH_UP
 		#define stack_direction(n)   n
 	#else
 		#define stack_direction(n)  -n
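
The assertions added to addGuard() above are cheap cycle insurance: guards is an intrusive singly-linked list that Assembler::patch(SideExit*) walks, and re-linking a record that is already on the list would turn that walk into an infinite loop. A standalone sketch of the list and the failure mode the asserts rule out (toy types, assert in place of NanoAssert):

    #include <cassert>

    struct GuardRecordToy { GuardRecordToy* next = nullptr; };

    struct SideExitToy {
        GuardRecordToy* guards = nullptr;

        void addGuard(GuardRecordToy* gr) {
            assert(gr->next == nullptr);  // not already on some list
            assert(guards != gr);         // not a self-referential re-add
            gr->next = guards;
            guards = gr;
        }

        int countGuards() const {
            int n = 0;
            for (GuardRecordToy* g = guards; g; g = g->next)
                ++n;                      // terminates only if the list is acyclic
            return n;
        }
    };

    int main() {
        SideExitToy exit;
        GuardRecordToy a, b;
        exit.addGuard(&a);
        exit.addGuard(&b);
        assert(exit.countGuards() == 2);
        // exit.addGuard(&a); // would fire the first assert instead of cycling
        return 0;
    }
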
--- a/js/src/trace-test.js
+++ b/js/src/trace-test.js
@@ -5004,16 +5004,40 @@ function testGlobalShapeChangeAfterDeepB
     var arr = [[], [], [], ["bug0", "bug1", "bug2", "bug3", "bug4"]];
     for (var i = 0; i < arr.length; i++)
         arr[i].forEach(f);
 }
 test(testGlobalShapeChangeAfterDeepBail);
 for (let i = 0; i < 5; i++)
     delete this["bug" + i];
 
+function testFunctionIdentityChange()
+{
+  function a() {}
+  function b() {}
+
+  var o = { a: a, b: b };
+
+  for (var prop in o)
+  {
+    for (var i = 0; i < 1000; i++)
+      o[prop]();
+  }
+
+  return true;
+}
+testFunctionIdentityChange.expected = true;
+testFunctionIdentityChange.jitstats = {
+  recorderStarted: 2,
+  traceCompleted: 2,
+  sideExitIntoInterpreter: 3
+};
+test(testFunctionIdentityChange);
+
+
 /*****************************************************************************
  *                                                                           *
  *  _____ _   _  _____ ______ _____ _______                                  *
  * |_   _| \ | |/ ____|  ____|  __ \__   __|                                 *
  *   | | |  \| | (___ | |__  | |__) | | |                                    *
  *   | | | . ` |\___ \|  __| |  _  /  | |                                    *
  *  _| |_| |\  |____) | |____| | \ \  | |                                    *
  * |_____|_| \_|_____/|______|_|  \_\ |_|                                    *