Backed out changeset f5ea964eb493. (Brendan Eich — High-level CSE for shape guards (518448, r=jorendorff).
author: Robert Sayre <sayrer@gmail.com>
date: Sat, 26 Sep 2009 08:16:58 -0700
changeset: 33194 2c67a109e6265c64b3126c111de59482018242f0
parent: 33183 f5ea964eb493458189bb2ff20cfff74a04e3e4d0
child: 33195 a1aea817398e76061215901a1f1696a8c96052e5
push id: unknown
push user: unknown
push date: unknown
reviewers: jorendorff
bugs: 518448
milestone: 1.9.3a1pre
backs out: f5ea964eb493458189bb2ff20cfff74a04e3e4d0
js/src/jsscope.cpp
js/src/jstracer.cpp
js/src/jstracer.h
js/src/nanojit/LIR.h
js/src/nanojit/LIRopcode.tbl
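
For context, the patch being backed out (bug 518448) taught TraceRecorder::guardShape to remember, in a per-recorder guardedShapeTable keyed by obj_ins, which objects already had a shape guard on the current trace, so only the first access emitted a guard; the table was flushed whenever a shape could change (JSScope::generateOwnShape, deep-bailing calls). The sketch below is not SpiderMonkey code and its names (ShapeGuardCSE, needGuard, forgetObject, forgetAll) are invented for illustration; it only shows the bookkeeping the removed code performed, using standard C++ containers and stand-in types.

// Minimal sketch of the shape-guard CSE idea removed by this backout.
#include <cstdio>
#include <unordered_map>

struct LIns {};      // stands in for a nanojit instruction (obj_ins)
struct JSObject {};  // stands in for a JS object

class ShapeGuardCSE {
    // Keyed by the LIR instruction producing the object pointer,
    // analogous to the removed guardedShapeTable.
    std::unordered_map<LIns*, JSObject*> guarded;

public:
    // Returns true if a shape guard must be emitted for obj_ins;
    // false if an earlier guard on this trace already covers it.
    bool needGuard(LIns* obj_ins, JSObject* obj) {
        // emplace() leaves the table untouched if obj_ins is already present.
        return guarded.emplace(obj_ins, obj).second;
    }

    // Forget guards for one object whose shape may change unpredictably
    // (cf. the removed hook in JSScope::generateOwnShape).
    void forgetObject(JSObject* obj) {
        for (auto it = guarded.begin(); it != guarded.end();) {
            if (it->second == obj)
                it = guarded.erase(it);
            else
                ++it;
        }
    }

    // Forget everything; deep-bailing calls can reshape arbitrary objects
    // (cf. the removed forgetGuardedShapes() call in enterDeepBailCall).
    void forgetAll() { guarded.clear(); }
};

int main() {
    ShapeGuardCSE cse;
    LIns ins;
    JSObject obj;
    std::printf("first access:  %s\n", cse.needGuard(&ins, &obj) ? "emit guard" : "skip");
    std::printf("second access: %s\n", cse.needGuard(&ins, &obj) ? "emit guard" : "skip");
    cse.forgetObject(&obj);
    std::printf("after reshape: %s\n", cse.needGuard(&ins, &obj) ? "emit guard" : "skip");
}

The diff below removes exactly this machinery: the guardedShapeTable member and its forget/dump helpers, the LIR_dbreak opcode and insAssert debug helper that backed the "already guarded" assertion, and the deep-bail bracketing added around the method read/write barriers.
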
--- a/js/src/jsscope.cpp
+++ b/js/src/jsscope.cpp
@@ -1017,37 +1017,18 @@ JSScope::reportReadOnlyScope(JSContext *
     if (!bytes)
         return;
     JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_READ_ONLY, bytes);
 }
 
 void
 JSScope::generateOwnShape(JSContext *cx)
 {
-#ifdef JS_TRACER
-    if (object) {
-         js_LeaveTraceIfGlobalObject(cx, object);
-
-        /*
-         * The JIT must have arranged to re-guard after any unpredictable shape
-         * change, so if we are on trace here, we should already be prepared to
-         * bail off trace.
-         */
-        JS_ASSERT_IF(JS_ON_TRACE(cx), cx->bailExit);
-
-        /*
-         * If we are recording, here is where we forget already-guarded shapes.
-         * Any subsequent property operation upon object on the trace currently
-         * being recorded will re-guard (and re-memoize).
-         */
-        JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
-        if (TraceRecorder *tr = tm->recorder)
-            tr->forgetGuardedShapesForObject(object);
-    }
-#endif
+    if (object)
+        js_LeaveTraceIfGlobalObject(cx, object);
 
     shape = js_GenerateShape(cx, false);
     setOwnShape();
 }
 
 JSScopeProperty *
 JSScope::add(JSContext *cx, jsid id,
              JSPropertyOp getter, JSPropertyOp setter,
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -2176,18 +2176,16 @@ TraceRecorder::TraceRecorder(JSContext* 
     this->global_dslots = this->globalObj->dslots;
     this->loop = true; /* default assumption is we are compiling a loop */
     this->outer = outer;
     this->outerArgc = outerArgc;
     this->pendingSpecializedNative = NULL;
     this->newobj_ins = NULL;
     this->loopLabel = NULL;
 
-    guardedShapeTable.ops = NULL;
-
 #ifdef JS_JIT_SPEW
     debug_only_print0(LC_TMMinimal, "\n");
     debug_only_printf(LC_TMMinimal, "Recording starting from %s:%u@%u (FragID=%06u)\n",
                       ti->treeFileName, ti->treeLineNumber, ti->treePCOffset,
                       _fragment->profFragID);
 
     debug_only_printf(LC_TMTracer, "globalObj=%p, shape=%d\n",
                       (void*)this->globalObj, OBJ_SHAPE(this->globalObj));
@@ -2239,17 +2237,17 @@ TraceRecorder::TraceRecorder(JSContext* 
         } else {
             entryLabel = lir->ins0(LIR_label);
         }
         NanoAssert(entryLabel);
         NanoAssert(!fragment->loopLabel);
         fragment->loopLabel = entryLabel;
     })
 
-    lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, sp)), "sp");
+    lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
     lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
     cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
     eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
     eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");
 
     /* If we came from exit, we might not have enough global types. */
     if (ti->globalSlots->length() > ti->nGlobalTypes())
         SpecializeTreesToMissingGlobals(cx, globalObj, ti);
@@ -2296,18 +2294,16 @@ TraceRecorder::~TraceRecorder()
     delete sanity_filter_1;
     delete sanity_filter_2;
 #endif
     delete cse_filter;
     delete expr_filter;
     delete func_filter;
     delete float_filter;
     delete lir_buf_writer;
-
-    forgetGuardedShapes();
 }
 
 bool
 TraceRecorder::outOfMemory()
 {
     return traceMonitor->dataAlloc->outOfMemory() || tempAlloc.outOfMemory();
 }
 
@@ -8582,146 +8578,30 @@ TraceRecorder::binary(LOpcode op)
         if (intop)
             a = lir->ins1(op == LIR_ush ? LIR_u2f : LIR_i2f, a);
         set(&l, a);
         return JSRS_CONTINUE;
     }
     return JSRS_STOP;
 }
 
-struct GuardedShapeEntry : public JSDHashEntryStub
-{
-    JSObject* obj;
-};
-
-#ifdef DEBUG
-#include <stdio.h>
-
-static FILE* shapefp = NULL;
-
-static void
-DumpShape(JSObject* obj, const char* prefix)
-{
-    JSScope* scope = OBJ_SCOPE(obj);
-
-    if (!shapefp) {
-        shapefp = fopen("/tmp/shapes.dump", "w");
-        if (!shapefp)
-            return;
-    }
-
-    fprintf(shapefp, "\n%s: shape %u flags %x\n", prefix, scope->shape, scope->flags);
-    for (JSScopeProperty* sprop = scope->lastProp; sprop; sprop = sprop->parent) {
-        if (JSID_IS_ATOM(sprop->id)) {
-            fprintf(shapefp, " %s", JS_GetStringBytes(JSVAL_TO_STRING(ID_TO_VALUE(sprop->id))));
-        } else {
-            JS_ASSERT(!JSID_IS_OBJECT(sprop->id));
-            fprintf(shapefp, " %d", JSID_TO_INT(sprop->id));
-        }
-        fprintf(shapefp, " %u %p %p %x %x %d\n",
-                sprop->slot, sprop->getter, sprop->setter, sprop->attrs, sprop->flags,
-                sprop->shortid);
-    }
-    fflush(shapefp);
-}
-
-static JSDHashOperator
-DumpShapeEnumerator(JSDHashTable* table, JSDHashEntryHdr* hdr, uint32 number, void* arg)
-{
-    GuardedShapeEntry* entry = (GuardedShapeEntry*) hdr;
-    const char* prefix = (const char*) arg;
-
-    DumpShape(entry->obj, prefix);
-    return JS_DHASH_NEXT;
-}
-
 void
-TraceRecorder::dumpGuardedShapes(const char* prefix)
-{
-    if (guardedShapeTable.ops)
-        JS_DHashTableEnumerate(&guardedShapeTable, DumpShapeEnumerator, (void*) prefix);
-}
-#endif /* DEBUG */
-
-JS_REQUIRES_STACK JSRecordingStatus
 TraceRecorder::guardShape(LIns* obj_ins, JSObject* obj, uint32 shape, const char* guardName,
                           LIns* map_ins, VMSideExit* exit)
 {
-    if (!guardedShapeTable.ops) {
-        JS_DHashTableInit(&guardedShapeTable, JS_DHashGetStubOps(), NULL,
-                          sizeof(GuardedShapeEntry), JS_DHASH_MIN_SIZE);
-    }
-
-    // Test (with add if missing) for a remembered guard for (obj_ins, obj).
-    GuardedShapeEntry* entry = (GuardedShapeEntry*)
-        JS_DHashTableOperate(&guardedShapeTable, obj_ins, JS_DHASH_ADD);
-    if (!entry) {
-        JS_ReportOutOfMemory(cx);
-        return JSRS_ERROR;
-    }
-
-    // If already guarded, emit an assertion that the shape matches.
-    if (entry->key) {
-        JS_ASSERT(entry->key == obj_ins);
-        JS_ASSERT(entry->obj == obj);
-        debug_only_stmt(
-            lir->insAssert(lir->ins2i(LIR_eq,
-                                      lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
-                                      shape));
-        )
-        return JSRS_CONTINUE;
-    }
-
-    // Not yet guarded. Remember obj_ins along with obj (for invalidation).
-    entry->key = obj_ins;
-    entry->obj = obj;
-
-    debug_only_stmt(DumpShape(obj, "guard");)
-    debug_only_stmt(fprintf(shapefp, "for obj_ins %p\n", obj_ins);)
-
-    // Finally, emit the shape guard.
     LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)), "shape");
     guard(true,
           addName(lir->ins2i(LIR_eq, shape_ins, shape), guardName),
           exit);
-    return JSRS_CONTINUE;
-}
-
-static JSDHashOperator
-ForgetGuardedShapesForObject(JSDHashTable* table, JSDHashEntryHdr* hdr, uint32 number, void* arg)
-{
-    GuardedShapeEntry* entry = (GuardedShapeEntry*) hdr;
-    if (entry->obj == arg) {
-        debug_only_stmt(DumpShape(entry->obj, "forget");)
-        return JS_DHASH_REMOVE;
-    }
-    return JS_DHASH_NEXT;
-}
-
-void
-TraceRecorder::forgetGuardedShapesForObject(JSObject* obj)
-{
-    if (guardedShapeTable.ops)
-        JS_DHashTableEnumerate(&guardedShapeTable, ForgetGuardedShapesForObject, obj);
-}
-
-void
-TraceRecorder::forgetGuardedShapes()
-{
-    if (guardedShapeTable.ops) {
-        debug_only_stmt(dumpGuardedShapes("forget-all");)
-        JS_DHashTableFinish(&guardedShapeTable);
-        guardedShapeTable.ops = NULL;
-    }
 }
 
 JS_STATIC_ASSERT(offsetof(JSObjectOps, objectMap) == 0);
 
 inline LIns*
-TraceRecorder::map(LIns* obj_ins)
+TraceRecorder::map(LIns *obj_ins)
 {
     return addName(lir->insLoad(LIR_ldp, obj_ins, (int) offsetof(JSObject, map)), "map");
 }
 
 bool
 TraceRecorder::map_is_native(JSObjectMap* map, LIns* map_ins, LIns*& ops_ins, size_t op_offset)
 {
     JS_ASSERT(op_offset < sizeof(JSObjectOps));
@@ -8891,17 +8771,17 @@ TraceRecorder::guardPropertyCacheHit(LIn
     VMSideExit* exit = snapshot(BRANCH_EXIT);
 
     uint32 vshape = PCVCAP_SHAPE(entry->vcap);
 
     // Check for first-level cache hit and guard on kshape if possible.
     // Otherwise guard on key object exact match.
     if (PCVCAP_TAG(entry->vcap) <= 1) {
         if (aobj != globalObj)
-            CHECK_STATUS(guardShape(obj_ins, aobj, entry->kshape, "guard_kshape", map_ins, exit));
+            guardShape(obj_ins, aobj, entry->kshape, "guard_kshape", map_ins, exit);
 
         if (entry->adding()) {
             if (aobj == globalObj)
                 ABORT_TRACE("adding a property to the global object");
 
             LIns *vshape_ins = addName(
                 lir->insLoad(LIR_ld,
                              addName(lir->insLoad(LIR_ldcp, cx_ins, offsetof(JSContext, runtime)),
@@ -8940,17 +8820,17 @@ TraceRecorder::guardPropertyCacheHit(LIn
         LIns* obj2_ins;
         if (PCVCAP_TAG(entry->vcap) == 1) {
             // Duplicate the special case in PROPERTY_CACHE_TEST.
             obj2_ins = addName(stobj_get_proto(obj_ins), "proto");
             guard(false, lir->ins_peq0(obj2_ins), exit);
         } else {
             obj2_ins = INS_CONSTOBJ(obj2);
         }
-        CHECK_STATUS(guardShape(obj2_ins, obj2, vshape, "guard_vshape", map(obj2_ins), exit));
+        guardShape(obj2_ins, obj2, vshape, "guard_vshape", map(obj2_ins), exit);
     }
 
     pcval = entry->vword;
     return JSRS_CONTINUE;
 }
 
 void
 TraceRecorder::stobj_set_fslot(LIns *obj_ins, unsigned slot, LIns* v_ins)
@@ -9229,17 +9109,17 @@ TraceRecorder::guardPrototypeHasNoIndexe
      * properties which might become visible through holes in the array.
      */
     VMSideExit* exit = snapshot(exitType);
 
     if (js_PrototypeHasIndexedProperties(cx, obj))
         return JSRS_STOP;
 
     while (guardHasPrototype(obj, obj_ins, &obj, &obj_ins, exit))
-        CHECK_STATUS(guardShape(obj_ins, obj, OBJ_SHAPE(obj), "guard(shape)", map(obj_ins), exit));
+        guardShape(obj_ins, obj, OBJ_SHAPE(obj), "guard(shape)", map(obj_ins), exit);
     return JSRS_CONTINUE;
 }
 
 JSRecordingStatus
 TraceRecorder::guardNotGlobalObject(JSObject* obj, LIns* obj_ins)
 {
     if (obj == globalObj)
         ABORT_TRACE("reference aliases global object");
@@ -10704,21 +10584,19 @@ TraceRecorder::setProp(jsval &l, JSPropC
      * we emit a call to the method write barrier. There's no need to guard on
      * this, because functions have distinct trace-type from other values and
      * branded-ness is implied by the shape, which we've already guarded on.
      */
     if (scope->branded() && VALUE_IS_FUNCTION(cx, v) && entry->directHit()) {
         if (obj == globalObj)
             ABORT_TRACE("can't trace function-valued property set in branded global scope");
 
-        enterDeepBailCall();
         LIns* args[] = { v_ins, INS_CONSTSPROP(sprop), obj_ins, cx_ins };
         LIns* ok_ins = lir->insCall(&MethodWriteBarrier_ci, args);
         guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
-        leaveDeepBailCall();
     }
 
     // Find obj2. If entry->adding(), the TAG bits are all 0.
     JSObject* obj2 = obj;
     for (jsuword i = PCVCAP_TAG(entry->vcap) >> PCVCAP_PROTOBITS; i; i--)
         obj2 = OBJ_GET_PARENT(cx, obj2);
     for (jsuword j = PCVCAP_TAG(entry->vcap) & PCVCAP_PROTOMASK; j; j--)
         obj2 = OBJ_GET_PROTO(cx, obj2);
@@ -10817,19 +10695,16 @@ TraceRecorder::enterDeepBailCall()
 {
     // Take snapshot for js_DeepBail and store it in cx->bailExit.
     VMSideExit* exit = snapshot(DEEP_BAIL_EXIT);
     lir->insStorei(INS_CONSTPTR(exit), cx_ins, offsetof(JSContext, bailExit));
 
     // Tell nanojit not to discard or defer stack writes before this call.
     GuardRecord* guardRec = createGuardRecord(exit);
     lir->insGuard(LIR_xbarrier, NULL, guardRec);
-
-    // Forget about guarded shapes, since deep bailers can reshape the world.
-    forgetGuardedShapes();
     return exit;
 }
 
 JS_REQUIRES_STACK void
 TraceRecorder::leaveDeepBailCall()
 {
     // Keep cx->bailExit null when it's invalid.
     lir->insStorei(INS_NULL(), cx_ins, offsetof(JSContext, bailExit));
@@ -11973,18 +11848,17 @@ TraceRecorder::prop(JSObject* obj, LIns*
          * FIXME: This loop can become a single shape guard once bug 497789 has
          * been fixed.
          */
         VMSideExit* exit = snapshot(BRANCH_EXIT);
         do {
             LIns* map_ins = map(obj_ins);
             LIns* ops_ins;
             if (map_is_native(obj->map, map_ins, ops_ins)) {
-                CHECK_STATUS(guardShape(obj_ins, obj, OBJ_SHAPE(obj), "guard(shape)", map_ins,
-                                        exit));
+                guardShape(obj_ins, obj, OBJ_SHAPE(obj), "guard(shape)", map_ins, exit);
             } else if (!guardDenseArray(obj, obj_ins, exit)) {
                 ABORT_TRACE("non-native object involved in undefined property access");
             }
         } while (guardHasPrototype(obj, obj_ins, &obj, &obj_ins, exit));
 
         set(outp, INS_CONST(JSVAL_TO_SPECIAL(JSVAL_VOID)), true);
         return JSRS_CONTINUE;
     }
@@ -12053,20 +11927,18 @@ TraceRecorder::prop(JSObject* obj, LIns*
      * value as well as other property attributes and order, so this condition
      * is trace-invariant.
      *
      * We do not impose the method read barrier if in an imacro, assuming any
      * property gets it does (e.g., for 'toString' from JSOP_NEW) will not be
      * leaked to the calling script.
      */
     if (isMethod && !cx->fp->imacpc) {
-        enterDeepBailCall();
         LIns* args[] = { v_ins, INS_CONSTSPROP(sprop), obj_ins, cx_ins };
         v_ins = lir->insCall(&MethodReadBarrier_ci, args);
-        leaveDeepBailCall();
     }
 
     if (slotp) {
         *slotp = slot;
         *v_insp = v_ins;
     }
     if (outp)
         set(outp, v_ins, true);
@@ -12801,17 +12673,17 @@ TraceRecorder::record_JSOP_INSTANCEOF()
     jsval& val = stackval(-2);
     LIns* val_ins = box_jsval(val, get(&val));
 
     enterDeepBailCall();
     LIns* args[] = {val_ins, get(&ctor), cx_ins};
     stack(-2, lir->insCall(&HasInstance_ci, args));
     LIns* status_ins = lir->insLoad(LIR_ld,
                                     lirbuf->state,
-                                    offsetof(InterpState, builtinStatus));
+                                    (int) offsetof(InterpState, builtinStatus));
     pendingGuardCondition = lir->ins_eq0(status_ins);
     leaveDeepBailCall();
 
     return JSRS_CONTINUE;
 }
 
 JS_REQUIRES_STACK JSRecordingStatus
 TraceRecorder::record_JSOP_DEBUGGER()
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -42,17 +42,16 @@
 #ifndef jstracer_h___
 #define jstracer_h___
 
 #ifdef JS_TRACER
 
 #include "jstypes.h"
 #include "jsbuiltins.h"
 #include "jscntxt.h"
-#include "jsdhash.h"
 #include "jsinterp.h"
 #include "jslock.h"
 #include "jsnum.h"
 
 #if defined(DEBUG) && !defined(JS_JIT_SPEW)
 #define JS_JIT_SPEW
 #endif
 
@@ -833,27 +832,19 @@ class TraceRecorder {
                                                        nanojit::LIns* l_ins, nanojit::LIns* r_ins,
                                                        bool negate, bool tryBranchAfterCond,
                                                        jsval& rval);
     JS_REQUIRES_STACK JSRecordingStatus relational(nanojit::LOpcode op, bool tryBranchAfterCond);
 
     JS_REQUIRES_STACK JSRecordingStatus unary(nanojit::LOpcode op);
     JS_REQUIRES_STACK JSRecordingStatus binary(nanojit::LOpcode op);
 
-    JS_REQUIRES_STACK JSRecordingStatus guardShape(nanojit::LIns* obj_ins, JSObject* obj,
-                                                   uint32 shape, const char* name,
-                                                   nanojit::LIns* map_ins, VMSideExit* exit);
-
-    JSDHashTable guardedShapeTable;
-
-#ifdef DEBUG
-    void dumpGuardedShapes(const char* prefix);
-#endif
-
-    void forgetGuardedShapes();
+    JS_REQUIRES_STACK void guardShape(nanojit::LIns* obj_ins, JSObject* obj,
+                                      uint32 shape, const char* guardName,
+                                      nanojit::LIns* map_ins, VMSideExit* exit);
 
     inline nanojit::LIns* map(nanojit::LIns *obj_ins);
     JS_REQUIRES_STACK bool map_is_native(JSObjectMap* map, nanojit::LIns* map_ins,
                                          nanojit::LIns*& ops_ins, size_t op_offset = 0);
     JS_REQUIRES_STACK JSRecordingStatus test_property_cache(JSObject* obj, nanojit::LIns* obj_ins,
                                                             JSObject*& obj2, jsuword& pcval);
     JS_REQUIRES_STACK JSRecordingStatus guardNativePropertyOp(JSObject* aobj,
                                                               nanojit::LIns* map_ins);
@@ -1051,17 +1042,17 @@ public:
 
     JS_REQUIRES_STACK JSRecordingStatus record_EnterFrame();
     JS_REQUIRES_STACK JSRecordingStatus record_LeaveFrame();
     JS_REQUIRES_STACK JSRecordingStatus record_SetPropHit(JSPropCacheEntry* entry,
                                                           JSScopeProperty* sprop);
     JS_REQUIRES_STACK JSRecordingStatus record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
     JS_REQUIRES_STACK JSRecordingStatus record_NativeCallComplete();
 
-    void forgetGuardedShapesForObject(JSObject* obj);
+    TreeInfo* getTreeInfo() { return treeInfo; }
 
 #ifdef DEBUG
     void tprint(const char *format, int count, nanojit::LIns *insa[]);
     void tprint(const char *format);
     void tprint(const char *format, nanojit::LIns *ins);
     void tprint(const char *format, nanojit::LIns *ins1, nanojit::LIns *ins2);
     void tprint(const char *format, nanojit::LIns *ins1, nanojit::LIns *ins2, nanojit::LIns *ins3);
     void tprint(const char *format, nanojit::LIns *ins1, nanojit::LIns *ins2, nanojit::LIns *ins3,
--- a/js/src/nanojit/LIR.h
+++ b/js/src/nanojit/LIR.h
@@ -1074,23 +1074,16 @@ namespace nanojit
         }
         virtual LInsp insAlloc(int32_t size) {
             NanoAssert(size != 0);
             return out->insAlloc(size);
         }
         virtual LInsp insSkip(size_t size) {
             return out->insSkip(size);
         }
-        void insAssert(LIns* expr) {
-            #if defined DEBUG
-            LIns* branch = insBranch(LIR_jt, expr, NULL);
-            ins0(LIR_dbreak);
-            branch->setTarget(ins0(LIR_label));
-            #endif
-        }
 
         // convenience functions
 
         // Inserts a conditional to execute and branches to execute if
         // the condition is true and false respectively.
         LIns*        ins_choose(LIns* cond, LIns* iftrue, LIns* iffalse);
         // Inserts an integer comparison to 0
         LIns*        ins_eq0(LIns* oprnd1);
--- a/js/src/nanojit/LIRopcode.tbl
+++ b/js/src/nanojit/LIRopcode.tbl
@@ -69,17 +69,17 @@ 1234567890123456789012345678901234567890
  */
 
 /*    op    val name        operands */
 
 /* special operations (must be 0..N) */
 OPDEF(start,     0, 0, Op0)     // start of a fragment
 OPDEF(regfence,  1, 0, Op0)     // register fence, no register allocation is allowed across this meta instruction
 OPDEF(skip,      2, 1, Sk)      // holds blobs ("payloads") of data;  also links pages
-OPDEF(dbreak,    3, 0, Op0)
+OPDEF(unused3,   3,-1, None)
 OPDEF(unused4,   4,-1, None)
 OPDEF(unused5,   5,-1, None)
 OPDEF(unused6,   6,-1, None)
 
 /* non-pure operations */
 OPDEF(iaddp,     7, 2, Op2)     // integer addition for temporary pointer calculations (32bit only)
 OPDEF(iparam,    8, 0, P)       // load a parameter (32bit register or stk location)
 OPDEF(unused9,   9,-1, None)