Backed out changeset f4662701526b (bug 488414) to fix !JS_THREADSAFE compilation errors
author: Igor Bukanov <igor@mir2.org>
Mon, 20 Apr 2009 11:59:36 +0200
changeset 27545 c0a409243f7b5262a069d05a91a70f160c4e55c8
parent 27544 f4662701526b6c7d3402fdd8021b576a295218ec
child 27546 7523cef763511b868922545ecaa2964477a46fee
push id: 6604
push user: rsayre@mozilla.com
push date: Mon, 20 Apr 2009 18:44:02 +0000
treeherder: mozilla-central@83068fe4a1ec [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
bugs: 488414
milestone: 1.9.2a1pre
backs out: f4662701526b6c7d3402fdd8021b576a295218ec
Backed out changeset f4662701526b (bug 488414) to fix !JS_THREADSAFE compilation errors
js/src/jsapi.cpp
js/src/jsbuiltins.cpp
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsinterp.cpp
js/src/jsinterp.h
js/src/jsobj.cpp
js/src/jsscope.cpp
js/src/jsscope.h
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -5213,17 +5213,22 @@ JS_TriggerOperationCallback(JSContext *c
      * visibility here, not read/write ordering.
      */
     JS_ATOMIC_SET(&cx->operationCallbackFlag, 1);
 }
 
 JS_PUBLIC_API(void)
 JS_TriggerAllOperationCallbacks(JSRuntime *rt)
 {
-    js_TriggerAllOperationCallbacks(rt, JS_FALSE);
+    JSContext *acx, *iter;
+    JS_LOCK_GC(rt);
+    iter = NULL;
+    while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)))
+        JS_TriggerOperationCallback(acx);
+    JS_UNLOCK_GC(rt);
 }
 
 JS_PUBLIC_API(JSBool)
 JS_IsRunning(JSContext *cx)
 {
     /* The use of cx->fp below is safe: if we're on trace, it is skipped. */
     VOUCH_DOES_NOT_REQUIRE_STACK();
 
--- a/js/src/jsbuiltins.cpp
+++ b/js/src/jsbuiltins.cpp
@@ -226,68 +226,67 @@ js_CallTree(InterpState* state, Fragment
     }
 
     return lr;
 }
 
 JSBool FASTCALL
 js_AddProperty(JSContext* cx, JSObject* obj, JSScopeProperty* sprop)
 {
+    JSScopeProperty* sprop2 = NULL; // initialize early to make MSVC happy
+
     JS_ASSERT(OBJ_IS_NATIVE(obj));
     JS_ASSERT(SPROP_HAS_STUB_SETTER(sprop));
 
     JS_LOCK_OBJ(cx, obj);
-
     JSScope* scope = OBJ_SCOPE(obj);
-    uint32 slot;
     if (scope->object == obj) {
         JS_ASSERT(!SCOPE_HAS_PROPERTY(scope, sprop));
     } else {
         scope = js_GetMutableScope(cx, obj);
-        if (!scope)
-            goto exit_trace;
+        if (!scope) {
+            JS_UNLOCK_OBJ(cx, obj);
+            return JS_FALSE;
+        }
     }
 
-    slot = sprop->slot;
+    uint32 slot = sprop->slot;
     if (!scope->table && sprop->parent == scope->lastProp && slot == scope->map.freeslot) {
         if (slot < STOBJ_NSLOTS(obj) && !OBJ_GET_CLASS(cx, obj)->reserveSlots) {
             JS_ASSERT(JSVAL_IS_VOID(STOBJ_GET_SLOT(obj, scope->map.freeslot)));
             ++scope->map.freeslot;
         } else {
-            if (!js_AllocSlot(cx, obj, &slot))
-                goto exit_trace;
+            if (!js_AllocSlot(cx, obj, &slot)) {
+                JS_UNLOCK_SCOPE(cx, scope);
+                return JS_FALSE;
+            }
 
             if (slot != sprop->slot) {
                 js_FreeSlot(cx, obj, slot);
-                goto exit_trace;
+                goto slot_changed;
             }
         }
 
         SCOPE_EXTEND_SHAPE(cx, scope, sprop);
         ++scope->entryCount;
         scope->lastProp = sprop;
-    } else {
-        JSScopeProperty *sprop2 = js_AddScopeProperty(cx, scope, sprop->id,
-                                                      sprop->getter,
-                                                      sprop->setter,
-                                                      SPROP_INVALID_SLOT,
-                                                      sprop->attrs,
-                                                      sprop->flags,
-                                                      sprop->shortid);
-        if (sprop2 != sprop)
-            goto exit_trace;
+        JS_UNLOCK_SCOPE(cx, scope);
+        return JS_TRUE;
     }
 
-    if (js_IsPropertyCacheDisabled(cx))
-        goto exit_trace;
+    sprop2 = js_AddScopeProperty(cx, scope, sprop->id,
+                                 sprop->getter, sprop->setter, SPROP_INVALID_SLOT,
+                                 sprop->attrs, sprop->flags, sprop->shortid);
+    if (sprop2 == sprop) {
+        JS_UNLOCK_SCOPE(cx, scope);
+        return JS_TRUE;
+    }
+    slot = sprop2->slot;
 
-    JS_UNLOCK_SCOPE(cx, scope);
-    return JS_TRUE;
-
-  exit_trace:
+  slot_changed:
     JS_UNLOCK_SCOPE(cx, scope);
     return JS_FALSE;
 }
 
 static JSBool
 HasProperty(JSContext* cx, JSObject* obj, jsid id)
 {
     // Check that we know how the lookup op will behave.
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -1714,57 +1714,37 @@ js_InvokeOperationCallback(JSContext *cx
     /*
      * Reset the callback flag first, then yield. If another thread is racing
      * us here we will accumulate another callback request which will be 
      * serviced at the next opportunity.
      */
     cx->operationCallbackFlag = 0;
 
     /*
-     * Unless we are going to run the GC, we automatically yield the current
-     * context every time the operation callback is hit since we might be
-     * called as a result of an impending GC, which would deadlock if we do
-     * not yield. Operation callbacks are supposed to happen rarely (seconds,
-     * not milliseconds) so it is acceptable to yield at every callback.
+     * We automatically yield the current context every time the operation
+     * callback is hit since we might be called as a result of an impending
+     * GC, which would deadlock if we do not yield. Operation callbacks
+     * are supposed to happen rarely (seconds, not milliseconds) so it is
+     * acceptable to yield at every callback.
      */
-    if (cx->runtime->gcIsNeeded)
-        js_GC(cx, GC_NORMAL);
 #ifdef JS_THREADSAFE    
-    else
-        JS_YieldRequest(cx);
+    JS_YieldRequest(cx);
 #endif
 
     JSOperationCallback cb = cx->operationCallback;
 
     /*
      * Important: Additional callbacks can occur inside the callback handler
      * if it re-enters the JS engine. The embedding must ensure that the
      * callback is disconnected before attempting such re-entry.
      */
 
     return !cb || cb(cx);
 }
 
-void
-js_TriggerAllOperationCallbacks(JSRuntime *rt, JSBool gcLocked)
-{
-    JSContext *acx, *iter;
-#ifdef JS_THREADSAFE
-    if (!gcLocked)
-        JS_LOCK_GC(rt);
-#endif
-    iter = NULL;
-    while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)))
-        JS_TriggerOperationCallback(acx);
-#ifdef JS_THREADSAFE
-    if (!gcLocked)
-        JS_UNLOCK_GC(rt);
-#endif
-}
-
 JSStackFrame *
 js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp)
 {
     if (!fp)
         fp = js_GetTopStackFrame(cx);
     while (fp) {
         if (fp->script)
             return fp;
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -349,17 +349,16 @@ struct JSRuntime {
     uint32              gcLastBytes;
     uint32              gcMaxBytes;
     uint32              gcMaxMallocBytes;
     uint32              gcEmptyArenaPoolLifespan;
     uint32              gcLevel;
     uint32              gcNumber;
     JSTracer            *gcMarkingTracer;
     uint32              gcTriggerFactor;
-    volatile JSBool     gcIsNeeded;
 
     /*
      * NB: do not pack another flag here by claiming gcPadding unless the new
      * flag is written only by the GC thread.  Atomic updates to packed bytes
      * are not guaranteed, so stores issued by one thread may be lost due to
      * unsynchronized read-modify-write cycles on other threads.
      */
     JSPackedBool        gcPoke;
@@ -547,26 +546,25 @@ struct JSRuntime {
 
 #define JS_THREAD_DATA(cx)      (&(cx)->runtime->threadData)
 #endif
 
     /*
      * Object shape (property cache structural type) identifier generator.
      *
      * Type 0 stands for the empty scope, and must not be regenerated due to
-     * uint32 wrap-around. Since js_GenerateShape (in jsinterp.cpp) uses
-     * atomic pre-increment, the initial value for the first typed non-empty
-     * scope will be 1.
+     * uint32 wrap-around. Since we use atomic pre-increment, the initial
+     * value for the first typed non-empty scope will be 1.
      *
+     * The GC compresses live types, minimizing rt->shapeGen in the process.
      * If this counter overflows into SHAPE_OVERFLOW_BIT (in jsinterp.h), the
-     * cache is disabled, to avoid aliasing two different types. It stays
-     * disabled until a triggered GC at some later moment compresses live
-     * types, minimizing rt->shapeGen in the process.
+     * GC will disable property caches for all threads, to avoid aliasing two
+     * different types. Updated by js_GenerateShape (in jsinterp.c).
      */
-    volatile uint32     shapeGen;
+    uint32              shapeGen;
 
     /* Literal table maintained by jsatom.c functions. */
     JSAtomState         atomState;
 
     /*
      * Cache of reusable JSNativeEnumerators mapped by shape identifiers (as
      * stored in scope->shape). This cache is nulled by the GC and protected
      * by gcLock.
@@ -1414,24 +1412,16 @@ extern JSErrorFormatString js_ErrorForma
 
 /*
  * Invoke the operation callback and return false if the current execution
  * is to be terminated.
  */
 extern JSBool
 js_InvokeOperationCallback(JSContext *cx);
 
-#ifndef JS_THREADSAFE
-# define js_TriggerAllOperationCallbacks(rt, gcLocked) \
-    js_TriggerAllOperationCallbacks (rt)
-#endif
-
-void
-js_TriggerAllOperationCallbacks(JSRuntime *rt, JSBool gcLocked);
-
 extern JSStackFrame *
 js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp);
 
 extern jsbytecode*
 js_GetCurrentBytecodePC(JSContext* cx);
 
 #ifdef JS_TRACER
 /*
@@ -1473,33 +1463,11 @@ js_CanLeaveTrace(JSContext *cx)
  */
 static JS_FORCES_STACK JS_INLINE JSStackFrame *
 js_GetTopStackFrame(JSContext *cx)
 {
     js_LeaveTrace(cx);
     return cx->fp;
 }
 
-static JS_INLINE JSBool
-js_IsPropertyCacheDisabled(JSContext *cx)
-{
-    return cx->runtime->shapeGen >= SHAPE_OVERFLOW_BIT;
-}
-
-static JS_INLINE uint32
-js_RegenerateShapeForGC(JSContext *cx)
-{
-    JS_ASSERT(cx->runtime->gcRunning);
-
-    /*
-     * Under the GC, compared with js_GenerateShape, we don't need to use
-     * atomic increments but we still must make sure that after an overflow
-     * the shape stays such.
-     */
-    uint32 shape = cx->runtime->shapeGen;
-    shape = (shape + 1) | (shape & SHAPE_OVERFLOW_BIT);
-    cx->runtime->shapeGen = shape;
-    return shape;
-}
-
 JS_END_EXTERN_C
 
 #endif /* jscntxt_h___ */
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -3172,34 +3172,16 @@ js_TraceRuntime(JSTracer *trc, JSBool al
 #else
         MarkReservedObjects(&rt->threadData.traceMonitor);
 #endif
     }
 
 #endif
 }
 
-void
-js_TriggerGC(JSContext *cx, JSBool gcLocked)
-{
-    JSRuntime *rt = cx->runtime;
-
-    JS_ASSERT(cx->requestDepth > 0);
-    JS_ASSERT(!rt->gcRunning);
-    if (rt->gcIsNeeded)
-        return;
-
-    /*
-     * Trigger the GC when it is safe to call an operation callback on any
-     * thread.
-     */
-    rt->gcIsNeeded = JS_TRUE;
-    js_TriggerAllOperationCallbacks(rt, gcLocked);
-}
-
 static void
 ProcessSetSlotRequest(JSContext *cx, JSSetSlotRequest *ssr)
 {
     JSObject *obj, *pobj;
     uint32 slot;
 
     obj = ssr->obj;
     pobj = ssr->pobj;
@@ -3300,16 +3282,17 @@ js_GC(JSContext *cx, JSGCInvocationKind 
     uint32 thingSize, indexLimit;
     JSGCArenaInfo *a, **ap, *emptyArenas;
     uint8 flags, *flagp;
     JSGCThing *thing, *freeList;
     JSGCArenaList *arenaList;
     JSBool allClear;
 #ifdef JS_THREADSAFE
     uint32 requestDebit;
+    JSContext *acx, *iter;
 #endif
 #ifdef JS_GCMETER
     uint32 nlivearenas, nkilledarenas, nthings;
 #endif
 
     JS_ASSERT_IF(gckind == GC_LAST_DITCH, !JS_ON_TRACE(cx));
     rt = cx->runtime;
 
@@ -3462,17 +3445,17 @@ js_GC(JSContext *cx, JSGCInvocationKind 
 
         /*
          * We assume here that killing links to parent and prototype objects
          * does not create garbage (such objects typically are long-lived and
          * widely shared, e.g. global objects, Function.prototype, etc.). We
          * collect garbage only if a racing thread attempted GC and is waiting
          * for us to finish (gcLevel > 1) or if someone already poked us.
          */
-        if (rt->gcLevel == 1 && !rt->gcPoke && !rt->gcIsNeeded)
+        if (rt->gcLevel == 1 && !rt->gcPoke)
             goto done_running;
 
         rt->gcLevel = 0;
         rt->gcPoke = JS_FALSE;
         rt->gcRunning = JS_FALSE;
 #ifdef JS_THREADSAFE
         rt->gcThread = NULL;
 #endif
@@ -3483,19 +3466,16 @@ js_GC(JSContext *cx, JSGCInvocationKind 
     JS_UNLOCK_GC(rt);
 
 #ifdef JS_TRACER
     if (JS_ON_TRACE(cx))
         goto out;
 #endif
     VOUCH_HAVE_STACK();
 
-    /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
-    rt->gcIsNeeded = JS_FALSE;
-
     /* Reset malloc counter. */
     rt->gcMallocBytes = 0;
 
 #ifdef JS_DUMP_SCOPE_METERS
   { extern void js_DumpScopeMeters(JSRuntime *rt);
     js_DumpScopeMeters(rt);
   }
 #endif
@@ -3781,16 +3761,33 @@ out:
     if (!JS_ON_TRACE(cx) && (rt->gcLevel > 1 || rt->gcPoke)) {
         VOUCH_HAVE_STACK();
         rt->gcLevel = 1;
         rt->gcPoke = JS_FALSE;
         JS_UNLOCK_GC(rt);
         goto restart;
     }
 
+    if (rt->shapeGen >= SHAPE_OVERFLOW_BIT - 1) {
+        /*
+         * FIXME bug 440834: The shape id space has overflowed. Currently we
+         * cope badly with this. Every call to js_GenerateShape does GC, and
+         * we never re-enable the property cache.
+         */
+        js_DisablePropertyCache(cx);
+#ifdef JS_THREADSAFE
+        iter = NULL;
+        while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) {
+            if (!acx->thread || acx->thread == cx->thread)
+                continue;
+            js_DisablePropertyCache(acx);
+        }
+#endif
+    }
+
     rt->gcLastBytes = rt->gcBytes;
   done_running:
     rt->gcLevel = 0;
     rt->gcRunning = JS_FALSE;
 
 #ifdef JS_THREADSAFE
     rt->gcThread = NULL;
     JS_NOTIFY_GC_DONE(rt);
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -243,26 +243,16 @@ js_TraceStackFrame(JSTracer *trc, JSStac
 
 extern JS_REQUIRES_STACK void
 js_TraceRuntime(JSTracer *trc, JSBool allAtoms);
 
 extern JS_REQUIRES_STACK JS_FRIEND_API(void)
 js_TraceContext(JSTracer *trc, JSContext *acx);
 
 /*
- * Schedule the GC call at a later safe point.
- */
-#ifndef JS_THREADSAFE
-# define js_TriggerGC(cx, gcLocked)    js_TriggerGC (cx)
-#endif
-
-extern void
-js_TriggerGC(JSContext *cx, JSBool gcLocked);
-
-/*
  * Kinds of js_GC invocation.
  */
 typedef enum JSGCInvocationKind {
     /* Normal invocation. */
     GC_NORMAL           = 0,
 
     /*
      * Called from js_DestroyContext for last JSContext in a JSRuntime, when
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -81,33 +81,36 @@
 #endif
 
 #include "jsautooplen.h"
 
 /* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. */
 #if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___
 
 uint32
-js_GenerateShape(JSContext *cx, JSBool gcLocked)
+js_GenerateShape(JSContext *cx, JSBool gcLocked, JSScopeProperty *sprop)
 {
     JSRuntime *rt;
     uint32 shape;
+    JSTempValueRooter tvr;
 
     rt = cx->runtime;
     shape = JS_ATOMIC_INCREMENT(&rt->shapeGen);
     JS_ASSERT(shape != 0);
-    if (shape >= SHAPE_OVERFLOW_BIT) {
-        /*
-         * FIXME bug 440834: The shape id space has overflowed. Currently we
-         * cope badly with this and schedule the GC on the every call. But
-         * first we make sure that increments from other threads would not
-         * have a chance to wrap around shapeGen to zero.
-         */
-        rt->shapeGen = SHAPE_OVERFLOW_BIT;
-        js_TriggerGC(cx, gcLocked);
+    if (shape & SHAPE_OVERFLOW_BIT) {
+        rt->gcPoke = JS_TRUE;
+        if (sprop)
+            JS_PUSH_TEMP_ROOT_SPROP(cx, sprop, &tvr);
+        js_GC(cx, gcLocked ? GC_LOCK_HELD : GC_NORMAL);
+        if (sprop)
+            JS_POP_TEMP_ROOT(cx, &tvr);
+        shape = JS_ATOMIC_INCREMENT(&rt->shapeGen);
+        JS_ASSERT(shape != 0);
+        JS_ASSERT_IF(shape & SHAPE_OVERFLOW_BIT,
+                     JS_PROPERTY_CACHE(cx).disabled);
     }
     return shape;
 }
 
 JS_REQUIRES_STACK void
 js_FillPropertyCache(JSContext *cx, JSObject *obj, jsuword kshape,
                      uintN scopeIndex, uintN protoIndex,
                      JSObject *pobj, JSScopeProperty *sprop,
@@ -121,19 +124,18 @@ js_FillPropertyCache(JSContext *cx, JSOb
     jsuword vword;
     ptrdiff_t pcoff;
     jsuword khash;
     JSAtom *atom;
     JSPropCacheEntry *entry;
 
     JS_ASSERT(!cx->runtime->gcRunning);
     cache = &JS_PROPERTY_CACHE(cx);
-
-    /* FIXME bug 489098: consider enabling the property cache for eval. */
-    if (js_IsPropertyCacheDisabled(cx) || (cx->fp->flags & JSFRAME_EVAL)) {
+    pc = cx->fp->regs->pc;
+    if (cache->disabled || (cx->fp->flags & JSFRAME_EVAL)) {
         PCMETER(cache->disfills++);
         *entryp = NULL;
         return;
     }
 
     /*
      * Check for fill from js_SetPropertyHelper where the setter removed sprop
      * from pobj's scope (via unwatch or delete, e.g.).
@@ -189,17 +191,16 @@ js_FillPropertyCache(JSContext *cx, JSOb
         *entryp = NULL;
         return;
     }
 
     /*
      * Optimize the cached vword based on our parameters and the current pc's
      * opcode format flags.
      */
-    pc = cx->fp->regs->pc;
     op = js_GetOpcode(cx, cx->fp->script, pc);
     cs = &js_CodeSpec[op];
 
     do {
         /*
          * Check for a prototype "plain old method" callee computation. What
          * is a plain old method? It's a function-valued property with stub
          * getter and setter, so get of a function is idempotent and set is
@@ -231,24 +232,16 @@ js_FillPropertyCache(JSContext *cx, JSOb
                             "branding %p (%s) for funobj %p (%s), kshape %lu\n",
                             pobj, LOCKED_OBJ_GET_CLASS(pobj)->name,
                             JSVAL_TO_OBJECT(v),
                             JS_GetFunctionName(GET_FUNCTION_PRIVATE(cx,
                                                  JSVAL_TO_OBJECT(v))),
                             kshape);
 #endif
                         SCOPE_MAKE_UNIQUE_SHAPE(cx, scope);
-                        if (js_IsPropertyCacheDisabled(cx)) {
-                            /*
-                             * js_GenerateShape could not recover from
-                             * rt->shapeGen's overflow.
-                             */
-                            *entryp = NULL;
-                            return;
-                        }
                         SCOPE_SET_BRANDED(scope);
                         if (OBJ_SCOPE(obj) == scope)
                             kshape = scope->shape;
                     }
                     vword = JSVAL_OBJECT_TO_PCVAL(v);
                     break;
                 }
             }
@@ -533,16 +526,30 @@ js_PurgePropertyCacheForScript(JSContext
             entry->kshape = 0;
 #ifdef DEBUG
             entry->vcap = entry->vword = 0;
 #endif
         }
     }
 }
 
+void
+js_DisablePropertyCache(JSContext *cx)
+{
+    JS_ASSERT(JS_PROPERTY_CACHE(cx).disabled >= 0);
+    ++JS_PROPERTY_CACHE(cx).disabled;
+}
+
+void
+js_EnablePropertyCache(JSContext *cx)
+{
+    --JS_PROPERTY_CACHE(cx).disabled;
+    JS_ASSERT(JS_PROPERTY_CACHE(cx).disabled >= 0);
+}
+
 /*
  * Check if the current arena has enough space to fit nslots after sp and, if
  * so, reserve the necessary space.
  */
 static JS_REQUIRES_STACK JSBool
 AllocateAfterSP(JSContext *cx, jsval *sp, uintN nslots)
 {
     uintN surplus;
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -238,37 +238,38 @@ typedef struct JSInlineFrame {
 
 #define PCVCAP_MAKE(t,s,p)      (((t) << PCVCAP_TAGBITS) |                    \
                                  ((s) << PCVCAP_PROTOBITS) |                  \
                                  (p))
 #define PCVCAP_SHAPE(t)         ((t) >> PCVCAP_TAGBITS)
 
 #define SHAPE_OVERFLOW_BIT      JS_BIT(32 - PCVCAP_TAGBITS)
 
-#ifndef JS_THREADSAFE
-# define js_GenerateShape(cx, gcLocked)    js_GenerateShape (cx)
-#endif
-
+/*
+ * When sprop is not null and the shape generation triggers the GC due to a
+ * shape overflow, the function roots sprop.
+ */
 extern uint32
-js_GenerateShape(JSContext *cx, JSBool gcLocked);
+js_GenerateShape(JSContext *cx, JSBool gcLocked, JSScopeProperty *sprop);
 
 struct JSPropCacheEntry {
     jsbytecode          *kpc;           /* pc if vcap tag is <= 1, else atom */
     jsuword             kshape;         /* key shape if pc, else obj for atom */
     jsuword             vcap;           /* value capability, see above */
     jsuword             vword;          /* value word, see PCVAL_* below */
 };
 
 #if defined DEBUG_brendan || defined DEBUG_brendaneich
 #define JS_PROPERTY_CACHE_METERING 1
 #endif
 
 typedef struct JSPropertyCache {
     JSPropCacheEntry    table[PROPERTY_CACHE_SIZE];
     JSBool              empty;
+    jsrefcount          disabled;       /* signed for anti-underflow asserts */
 #ifdef JS_PROPERTY_CACHE_METERING
     uint32              fills;          /* number of cache entry fills */
     uint32              nofills;        /* couldn't fill (e.g. default get) */
     uint32              rofills;        /* set on read-only prop can't fill */
     uint32              disfills;       /* fill attempts on disabled cache */
     uint32              oddfills;       /* fill attempt after setter deleted */
     uint32              modfills;       /* fill that rehashed to a new entry */
     uint32              brandfills;     /* scope brandings to type structural
@@ -403,16 +404,22 @@ js_FullTestPropertyCache(JSContext *cx, 
 #define js_FinishPropertyCache(cache) ((void) 0)
 
 extern void
 js_PurgePropertyCache(JSContext *cx, JSPropertyCache *cache);
 
 extern void
 js_PurgePropertyCacheForScript(JSContext *cx, JSScript *script);
 
+extern void
+js_DisablePropertyCache(JSContext *cx);
+
+extern void
+js_EnablePropertyCache(JSContext *cx);
+
 /*
  * Interpreter stack arena-pool alloc and free functions.
  */
 extern JS_REQUIRES_STACK JS_FRIEND_API(jsval *)
 js_AllocStack(JSContext *cx, uintN nslots, void **markp);
 
 extern JS_REQUIRES_STACK JS_FRIEND_API(void)
 js_FreeStack(JSContext *cx, void *mark);
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -5812,23 +5812,27 @@ js_TraceObject(JSTracer *trc, JSObject *
         sprop = SCOPE_LAST_PROP(scope);
         if (sprop) {
             JS_ASSERT(SCOPE_HAS_PROPERTY(scope, sprop));
 
             /* Regenerate property cache shape ids if GC'ing. */
             if (IS_GC_MARKING_TRACER(trc)) {
                 uint32 shape, oldshape;
 
-                shape = js_RegenerateShapeForGC(cx);
+                shape = ++cx->runtime->shapeGen;
+                JS_ASSERT(shape != 0);
+
                 if (!(sprop->flags & SPROP_MARK)) {
                     oldshape = sprop->shape;
                     sprop->shape = shape;
                     sprop->flags |= SPROP_FLAG_SHAPE_REGEN;
-                    if (scope->shape != oldshape)
-                        shape = js_RegenerateShapeForGC(cx);
+                    if (scope->shape != oldshape) {
+                        shape = ++cx->runtime->shapeGen;
+                        JS_ASSERT(shape != 0);
+                    }
                 }
 
                 scope->shape = shape;
             }
 
             /* Trace scope's property tree ancestor line. */
             do {
                 if (SCOPE_HAD_MIDDLE_DELETE(scope) &&
--- a/js/src/jsscope.cpp
+++ b/js/src/jsscope.cpp
@@ -783,29 +783,30 @@ HashChunks(PropTreeKidsChunk *chunk, uin
     return table;
 }
 
 /*
  * Called without cx->runtime->gcLock held. This function acquires that lock
  * only when inserting a new child.  Thus there may be races to find or add a
  * node that result in duplicates.  We expect such races to be rare!
  *
- * We use rt->gcLock, not rt->rtLock, to avoid nesting the former inside the
- * latter in js_GenerateShape below.
+ * We use rt->gcLock, not rt->rtLock, to allow the GC potentially to nest here
+ * under js_GenerateShape.
  */
 static JSScopeProperty *
 GetPropertyTreeChild(JSContext *cx, JSScopeProperty *parent,
                      JSScopeProperty *child)
 {
     JSRuntime *rt;
     JSDHashTable *table;
     JSPropertyTreeEntry *entry;
     JSScopeProperty *sprop;
     PropTreeKidsChunk *chunk;
     uintN i, n;
+    uint32 shape;
 
     rt = cx->runtime;
     if (!parent) {
         JS_LOCK_GC(rt);
 
         table = &rt->propertyTreeHash;
         entry = (JSPropertyTreeEntry *)
                 JS_DHashTableOperate(table, child, JS_DHASH_ADD);
@@ -882,29 +883,35 @@ GetPropertyTreeChild(JSContext *cx, JSSc
             }
         }
 
     not_found:
         JS_LOCK_GC(rt);
     }
 
 locked_not_found:
+    /*
+     * Call js_GenerateShape before the allocation to prevent collecting the
+     * new property when the shape generation triggers the GC.
+     */
+    shape = js_GenerateShape(cx, JS_TRUE, NULL);
+
     sprop = NewScopeProperty(rt);
     if (!sprop)
         goto out_of_memory;
 
     sprop->id = child->id;
     sprop->getter = child->getter;
     sprop->setter = child->setter;
     sprop->slot = child->slot;
     sprop->attrs = child->attrs;
     sprop->flags = child->flags;
     sprop->shortid = child->shortid;
     sprop->parent = sprop->kids = NULL;
-    sprop->shape = js_GenerateShape(cx, JS_TRUE);
+    sprop->shape = shape;
 
     if (!parent) {
         entry->child = sprop;
     } else {
         if (!InsertPropertyTreeChild(rt, parent, sprop, NULL))
             goto out_of_memory;
     }
 
@@ -1720,20 +1727,22 @@ js_SweepScopeProperties(JSContext *cx)
              * and continue the while loop.
              *
              * Regenerate sprop->shape if it hasn't already been refreshed
              * during the mark phase, when live scopes' lastProp members are
              * followed to update both scope->shape and lastProp->shape.
              */
             if (sprop->flags & SPROP_MARK) {
                 sprop->flags &= ~SPROP_MARK;
-                if (sprop->flags & SPROP_FLAG_SHAPE_REGEN)
+                if (sprop->flags & SPROP_FLAG_SHAPE_REGEN) {
                     sprop->flags &= ~SPROP_FLAG_SHAPE_REGEN;
-                else
-                    sprop->shape = js_RegenerateShapeForGC(cx);
+                } else {
+                    sprop->shape = ++cx->runtime->shapeGen;
+                    JS_ASSERT(sprop->shape != 0);
+                }
                 liveCount++;
                 continue;
             }
 
             /* Ok, sprop is garbage to collect: unlink it from its parent. */
             freeChunk = RemovePropertyTreeChild(rt, sprop);
 
             /*
--- a/js/src/jsscope.h
+++ b/js/src/jsscope.h
@@ -212,25 +212,25 @@ struct JSScope {
 };
 
 #define JS_IS_SCOPE_LOCKED(cx, scope)   JS_IS_TITLE_LOCKED(cx, &(scope)->title)
 
 #define OBJ_SCOPE(obj)                  ((JSScope *)(obj)->map)
 #define OBJ_SHAPE(obj)                  (OBJ_SCOPE(obj)->shape)
 
 #define SCOPE_MAKE_UNIQUE_SHAPE(cx,scope)                                     \
-    ((scope)->shape = js_GenerateShape((cx), JS_FALSE))
+    ((scope)->shape = js_GenerateShape((cx), JS_FALSE, NULL))
 
 #define SCOPE_EXTEND_SHAPE(cx,scope,sprop)                                    \
     JS_BEGIN_MACRO                                                            \
         if (!(scope)->lastProp ||                                             \
             (scope)->shape == (scope)->lastProp->shape) {                     \
             (scope)->shape = (sprop)->shape;                                  \
         } else {                                                              \
-            (scope)->shape = js_GenerateShape(cx, JS_FALSE);                  \
+            (scope)->shape = js_GenerateShape((cx), JS_FALSE, sprop);         \
         }                                                                     \
     JS_END_MACRO
 
 /* By definition, hashShift = JS_DHASH_BITS - log2(capacity). */
 #define SCOPE_CAPACITY(scope)           JS_BIT(JS_DHASH_BITS-(scope)->hashShift)
 
 /* Scope flags and some macros to hide them from other files than jsscope.c. */
 #define SCOPE_MIDDLE_DELETE             0x0001