Bug 609104 - Move the property tree to the compartment (r=brendan)
author Bill McCloskey <wmccloskey@mozilla.com>
Fri, 04 Feb 2011 10:59:07 -0800
changeset 62077 db8be4e3f373398ea980eb3c68de9860114c3a50
parent 62076 c08656eb9e399f7256a289afc0ab25705c63a2fd
child 62078 8003e273cd7e99530414ec09b39aadf0bb9dcfe7
push id unknown
push user unknown
push date unknown
reviewers brendan
bugs 609104
milestone 2.0b11pre
js/src/jsapi.cpp
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsdbgapi.cpp
js/src/jsfun.cpp
js/src/jsgc.cpp
js/src/jsiter.cpp
js/src/jsobj.cpp
js/src/jsobj.h
js/src/jspropertytree.cpp
js/src/jspropertytree.h
js/src/jsproxy.cpp
js/src/jsscope.cpp
js/src/jsscope.h
js/src/jsscopeinlines.h
js/src/jsscriptinlines.h
js/src/jstracer.cpp
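
For orientation before the hunks themselves: the patch moves the shared property tree and the ABCDEW empty shapes from JSRuntime into JSCompartment, and JS_PROPERTY_TREE(cx) now resolves through cx->compartment rather than cx->runtime. Below is a minimal, self-contained sketch of the resulting layout; the types are simplified stand-ins for the real SpiderMonkey classes, not the actual headers.

    // Simplified stand-ins for js::PropertyTree and js::EmptyShape.
    struct PropertyTree { /* arena pool + free list of Shape nodes */ };
    struct EmptyShape   { /* shared empty scope for a built-in class */ };

    struct JSCompartment {
        // Previously a JSRuntime member; each compartment now owns its own tree.
        PropertyTree propertyTree;

        // Runtime-shared empty scopes move here too (mnemonic: ABCDEW).
        EmptyShape *emptyArgumentsShape;
        EmptyShape *emptyBlockShape;
        EmptyShape *emptyCallShape;
        EmptyShape *emptyDeclEnvShape;
        EmptyShape *emptyEnumeratorShape;
        EmptyShape *emptyWithShape;
    };

    // jscompartment.h after the patch:
    //   #define JS_PROPERTY_TREE(cx) ((cx)->compartment->propertyTree)
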
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -643,20 +643,16 @@ JSRuntime::init(uint32 maxbytes)
     if (functionMeterFilename) {
         if (!methodReadBarrierCountMap.init())
             return false;
         if (!unjoinedFunctionCountMap.init())
             return false;
     }
     propTreeStatFilename = getenv("JS_PROPTREE_STATFILE");
     propTreeDumpFilename = getenv("JS_PROPTREE_DUMPFILE");
-    if (meterEmptyShapes()) {
-        if (!emptyShapes.init())
-            return false;
-    }
 #endif
 
     if (!(atomsCompartment = js_new<JSCompartment>(this)) ||
         !atomsCompartment->init() ||
         !compartments.append(atomsCompartment)) {
         return false;
     }
 
@@ -677,17 +673,17 @@ JSRuntime::init(uint32 maxbytes)
         return false;
     debuggerLock = JS_NEW_LOCK();
     if (!debuggerLock)
         return false;
 #endif
 
     debugMode = JS_FALSE;
 
-    return propertyTree.init() && js_InitThreads(this);
+    return js_InitThreads(this);
 }
 
 JSRuntime::~JSRuntime()
 {
 #ifdef DEBUG
     /* Don't hurt everyone in leaky ol' Mozilla with a fatal JS_ASSERT! */
     if (!JS_CLIST_IS_EMPTY(&contextList)) {
         JSContext *cx, *iter = NULL;
@@ -718,17 +714,16 @@ JSRuntime::~JSRuntime()
         JS_DESTROY_CONDVAR(requestDone);
     if (rtLock)
         JS_DESTROY_LOCK(rtLock);
     if (stateChange)
         JS_DESTROY_CONDVAR(stateChange);
     if (debuggerLock)
         JS_DESTROY_LOCK(debuggerLock);
 #endif
-    propertyTree.finish();
 }
 
 JS_PUBLIC_API(JSRuntime *)
 JS_NewRuntime(uint32 maxbytes)
 {
 #ifdef DEBUG
     if (!js_NewRuntimeWasCalled) {
         /*
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -806,29 +806,16 @@ js_NewContext(JSRuntime *rt, size_t stac
          * scriptFilenameTable may be left over from a previous episode of
          * non-zero contexts alive in rt, so don't re-init the table if it's
          * not necessary.
          */
         if (ok && !rt->scriptFilenameTable)
             ok = js_InitRuntimeScriptState(rt);
         if (ok)
             ok = js_InitRuntimeNumberState(cx);
-        if (ok) {
-            /*
-             * Ensure that the empty scopes initialized by
-             * Shape::initRuntimeState get the desired special shapes.
-             * (The rt->state dance above guarantees that this abuse of
-             * rt->shapeGen is thread-safe.)
-             */
-            uint32 shapeGen = rt->shapeGen;
-            rt->shapeGen = 0;
-            ok = Shape::initRuntimeState(cx);
-            if (rt->shapeGen < shapeGen)
-                rt->shapeGen = shapeGen;
-        }
 
 #ifdef JS_THREADSAFE
         JS_EndRequest(cx);
 #endif
         if (!ok) {
             js_DestroyContext(cx, JSDCM_NEW_FAILED);
             return NULL;
         }
@@ -1040,17 +1027,16 @@ js_DestroyContext(JSContext *cx, JSDestr
              * not-last context destruction racing in another thread try to
              * force or maybe run the GC, but by that point, rt->state will
              * not be JSRTS_UP, and that GC attempt will return early.
              */
             if (cx->thread->data.requestDepth == 0)
                 JS_BeginRequest(cx);
 #endif
 
-            Shape::finishRuntimeState(cx);
             js_FinishRuntimeNumberState(cx);
 
             /* Unpin all common atoms before final GC. */
             js_FinishCommonAtoms(cx);
 
             /* Clear debugging state to remove GC roots. */
             JS_ClearAllTraps(cx);
             JS_ClearAllWatchPoints(cx);
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -1174,25 +1174,16 @@ struct JSRuntime {
      * an override is set on the context.
      */
     JSSecurityCallbacks *securityCallbacks;
 
     /* Structured data callbacks are runtime-wide. */
     const JSStructuredCloneCallbacks *structuredCloneCallbacks;
 
     /*
-     * Shared scope property tree, and arena-pool for allocating its nodes.
-     * This really should be free of all locking overhead and allocated in
-     * thread-local storage, hence the JS_PROPERTY_TREE(cx) macro.
-     */
-    js::PropertyTree    propertyTree;
-
-#define JS_PROPERTY_TREE(cx) ((cx)->runtime->propertyTree)
-
-    /*
      * The propertyRemovals counter is incremented for every JSObject::clear,
      * and for each JSObject::remove method call that frees a slot in the given
      * object. See js_NativeGet and js_NativeSet in jsobj.cpp.
      */
     int32               propertyRemovals;
 
     /* Script filename table. */
     struct JSHashTable  *scriptFilenameTable;
@@ -1239,27 +1230,16 @@ struct JSRuntime {
      * types, minimizing rt->shapeGen in the process.
      */
     volatile uint32     shapeGen;
 
     /* Literal table maintained by jsatom.c functions. */
     JSAtomState         atomState;
 
     /*
-     * Runtime-shared empty scopes for well-known built-in objects that lack
-     * class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
-     */
-    js::EmptyShape      *emptyArgumentsShape;
-    js::EmptyShape      *emptyBlockShape;
-    js::EmptyShape      *emptyCallShape;
-    js::EmptyShape      *emptyDeclEnvShape;
-    js::EmptyShape      *emptyEnumeratorShape;
-    js::EmptyShape      *emptyWithShape;
-
-    /*
      * Various metering fields are defined at the end of JSRuntime. In this
      * way there is no need to recompile all the code that refers to other
      * fields of JSRuntime after enabling the corresponding metering macro.
      */
 #ifdef JS_DUMP_ENUM_CACHE_STATS
     int32               nativeEnumProbes;
     int32               nativeEnumMisses;
 # define ENUM_CACHE_METER(name)     JS_ATOMIC_INCREMENT(&cx->runtime->name)
@@ -1274,43 +1254,31 @@ struct JSRuntime {
 
 #ifdef DEBUG
     /* Function invocation metering. */
     jsrefcount          inlineCalls;
     jsrefcount          nativeCalls;
     jsrefcount          nonInlineCalls;
     jsrefcount          constructs;
 
-    /* Property metering. */
     jsrefcount          liveObjectProps;
     jsrefcount          liveObjectPropsPreSweep;
-    jsrefcount          totalObjectProps;
-    jsrefcount          livePropTreeNodes;
-    jsrefcount          duplicatePropTreeNodes;
-    jsrefcount          totalPropTreeNodes;
-    jsrefcount          propTreeKidsChunks;
-    jsrefcount          liveDictModeNodes;
 
     /*
-     * NB: emptyShapes is init'ed iff at least one of these envars is set:
+     * NB: emptyShapes (in JSCompartment) is init'ed iff at least one
+     * of these envars is set:
      *
      *  JS_PROPTREE_STATFILE  statistics on the property tree forest
      *  JS_PROPTREE_DUMPFILE  all paths in the property tree forest
      */
     const char          *propTreeStatFilename;
     const char          *propTreeDumpFilename;
 
     bool meterEmptyShapes() const { return propTreeStatFilename || propTreeDumpFilename; }
 
-    typedef js::HashSet<js::EmptyShape *,
-                        js::DefaultHasher<js::EmptyShape *>,
-                        js::SystemAllocPolicy> EmptyShapeSet;
-
-    EmptyShapeSet       emptyShapes;
-
     /* String instrumentation. */
     jsrefcount          liveStrings;
     jsrefcount          totalStrings;
     jsrefcount          liveDependentStrings;
     jsrefcount          totalDependentStrings;
     jsrefcount          badUndependStrings;
     double              lengthSum;
     double              lengthSquaredSum;
@@ -3212,29 +3180,29 @@ js_GetTopStackFrame(JSContext *cx)
 
 static JS_INLINE JSBool
 js_IsPropertyCacheDisabled(JSContext *cx)
 {
     return cx->runtime->shapeGen >= js::SHAPE_OVERFLOW_BIT;
 }
 
 static JS_INLINE uint32
-js_RegenerateShapeForGC(JSContext *cx)
+js_RegenerateShapeForGC(JSRuntime *rt)
 {
-    JS_ASSERT(cx->runtime->gcRunning);
-    JS_ASSERT(cx->runtime->gcRegenShapes);
+    JS_ASSERT(rt->gcRunning);
+    JS_ASSERT(rt->gcRegenShapes);
 
     /*
      * Under the GC, compared with js_GenerateShape, we don't need to use
      * atomic increments but we still must make sure that after an overflow
      * the shape stays such.
      */
-    uint32 shape = cx->runtime->shapeGen;
+    uint32 shape = rt->shapeGen;
     shape = (shape + 1) | (shape & js::SHAPE_OVERFLOW_BIT);
-    cx->runtime->shapeGen = shape;
+    rt->shapeGen = shape;
     return shape;
 }
 
 namespace js {
 
 inline void *
 ContextAllocPolicy::malloc(size_t bytes)
 {
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -57,39 +57,43 @@ using namespace js::gc;
 
 JSCompartment::JSCompartment(JSRuntime *rt)
   : rt(rt),
     principals(NULL),
     gcBytes(0),
     gcTriggerBytes(0),
     gcLastBytes(0),
     data(NULL),
-    marked(false),
     active(false),
 #ifdef JS_METHODJIT
     jaegerCompartment(NULL),
 #endif
+    propertyTree(this),
     debugMode(rt->debugMode),
 #if ENABLE_YARR_JIT
     regExpAllocator(NULL),
 #endif
-    mathCache(NULL)
+    mathCache(NULL),
+    marked(false)
 {
     JS_INIT_CLIST(&scripts);
 
 #ifdef JS_TRACER
     /* InitJIT expects this area to be zero'd. */
     PodZero(&traceMonitor);
 #endif
 
     PodArrayZero(scriptsToGC);
 }
 
 JSCompartment::~JSCompartment()
 {
+    Shape::finishEmptyShapes(this);
+    propertyTree.finish();
+
 #if ENABLE_YARR_JIT
     js_delete(regExpAllocator);
 #endif
 
 #if defined JS_TRACER
     FinishJIT(&traceMonitor);
 #endif
 
@@ -114,20 +118,32 @@ JSCompartment::init()
     for (unsigned i = 0; i < FINALIZE_LIMIT; i++)
         freeLists.finalizables[i] = NULL;
 #ifdef JS_GCMETER
     memset(&compartmentStats, 0, sizeof(JSGCArenaStats) * FINALIZE_LIMIT);
 #endif
     if (!crossCompartmentWrappers.init())
         return false;
 
+    if (!propertyTree.init())
+        return false;
+
+#ifdef DEBUG
+    if (rt->meterEmptyShapes()) {
+        if (!emptyShapes.init())
+            return false;
+    }
+#endif
+
+    if (!Shape::initEmptyShapes(this))
+        return false;
+
 #ifdef JS_TRACER
-    if (!InitJIT(&traceMonitor)) {
+    if (!InitJIT(&traceMonitor))
         return false;
-    }
 #endif
 
     if (!toSourceCache.init())
         return false;
 
 #if ENABLE_YARR_JIT
     regExpAllocator = JSC::ExecutableAllocator::create();
     if (!regExpAllocator)
@@ -393,21 +409,54 @@ ScriptPoolDestroyed(JSContext *cx, mjit:
             pool->m_destroy = true;
             counter = releaseInterval;
         }
     }
     return pool->m_destroy;
 }
 #endif
 
+/*
+ * This method marks pointers that cross compartment boundaries. It should be
+ * called only by per-compartment GCs, since full GCs naturally follow pointers
+ * across compartments.
+ */
+void
+JSCompartment::markCrossCompartment(JSTracer *trc)
+{
+    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront())
+        MarkValue(trc, e.front().key, "cross-compartment wrapper");
+}
+
 void
 JSCompartment::mark(JSTracer *trc)
 {
-    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront())
-        MarkValue(trc, e.front().key, "cross-compartment wrapper");
+    if (IS_GC_MARKING_TRACER(trc)) {
+        JSRuntime *rt = trc->context->runtime;
+        if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != this)
+            return;
+        
+        if (marked)
+            return;
+        
+        marked = true;
+    }
+
+    if (emptyArgumentsShape)
+        emptyArgumentsShape->trace(trc);
+    if (emptyBlockShape)
+        emptyBlockShape->trace(trc);
+    if (emptyCallShape)
+        emptyCallShape->trace(trc);
+    if (emptyDeclEnvShape)
+        emptyDeclEnvShape->trace(trc);
+    if (emptyEnumeratorShape)
+        emptyEnumeratorShape->trace(trc);
+    if (emptyWithShape)
+        emptyWithShape->trace(trc);
 }
 
 void
 JSCompartment::sweep(JSContext *cx, uint32 releaseInterval)
 {
     chunk = NULL;
     /* Remove dead wrappers from the table. */
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -393,39 +393,73 @@ struct JS_FRIEND_API(JSCompartment) {
     /* Hashed lists of scripts created by eval to garbage-collect. */
     JSScript                     *scriptsToGC[JS_EVAL_CACHE_SIZE];
 
 #ifdef DEBUG
     JSEvalCacheMeter             evalCacheMeter;
 #endif
 
     void                         *data;
-    bool                         marked;
     bool                         active;  // GC flag, whether there are active frames
     js::WrapperMap               crossCompartmentWrappers;
 
 #ifdef JS_METHODJIT
     js::mjit::JaegerCompartment  *jaegerCompartment;
 #endif
 
+    /*
+     * Shared scope property tree, and arena-pool for allocating its nodes.
+     */
+    js::PropertyTree             propertyTree;
+
+#ifdef DEBUG
+    /* Property metering. */
+    jsrefcount                   livePropTreeNodes;
+    jsrefcount                   totalPropTreeNodes;
+    jsrefcount                   propTreeKidsChunks;
+    jsrefcount                   liveDictModeNodes;
+#endif
+
+    /*
+     * Runtime-shared empty scopes for well-known built-in objects that lack
+     * class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
+     */
+    js::EmptyShape               *emptyArgumentsShape;
+    js::EmptyShape               *emptyBlockShape;
+    js::EmptyShape               *emptyCallShape;
+    js::EmptyShape               *emptyDeclEnvShape;
+    js::EmptyShape               *emptyEnumeratorShape;
+    js::EmptyShape               *emptyWithShape;
+
+    typedef js::HashSet<js::EmptyShape *,
+                        js::DefaultHasher<js::EmptyShape *>,
+                        js::SystemAllocPolicy> EmptyShapeSet;
+
+    EmptyShapeSet                emptyShapes;
+
     bool                         debugMode;  // true iff debug mode on
     JSCList                      scripts;    // scripts in this compartment
 
     JSC::ExecutableAllocator     *regExpAllocator;
 
     js::NativeIterCache          nativeIterCache;
 
     js::ToSourceCache            toSourceCache;
 
-    JSCompartment(JSRuntime *cx);
+    JSCompartment(JSRuntime *rt);
     ~JSCompartment();
 
     bool init();
 
+    /* Mark cross-compartment pointers. */
+    void markCrossCompartment(JSTracer *trc);
+
+    /* Mark this compartment's local roots. */
     void mark(JSTracer *trc);
+
     bool wrap(JSContext *cx, js::Value *vp);
     bool wrap(JSContext *cx, JSString **strp);
     bool wrap(JSContext *cx, JSObject **objp);
     bool wrapId(JSContext *cx, jsid *idp);
     bool wrap(JSContext *cx, js::PropertyOp *op);
     bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
     bool wrap(JSContext *cx, js::AutoIdVector &props);
 
@@ -440,24 +474,35 @@ struct JS_FRIEND_API(JSCompartment) {
 
     js::DtoaCache dtoaCache;
 
   private:
     js::MathCache                *mathCache;
 
     js::MathCache *allocMathCache(JSContext *cx);
 
+    bool                         marked;
+    
   public:
     js::MathCache *getMathCache(JSContext *cx) {
         return mathCache ? mathCache : allocMathCache(cx);
     }
+
+    bool isMarked() { return marked; }
+    void clearMark() { marked = false; }
 };
 
-#define JS_TRACE_MONITOR(cx)    (cx->compartment->traceMonitor)
-#define JS_SCRIPTS_TO_GC(cx)    (cx->compartment->scriptsToGC)
+#define JS_SCRIPTS_TO_GC(cx)    ((cx)->compartment->scriptsToGC)
+#define JS_PROPERTY_TREE(cx)    ((cx)->compartment->propertyTree)
+
+#ifdef DEBUG
+#define JS_COMPARTMENT_METER(x) x
+#else
+#define JS_COMPARTMENT_METER(x)
+#endif
 
 /*
  * N.B. JS_ON_TRACE(cx) is true if JIT code is on the stack in the current
  * thread, regardless of whether cx is the context in which that trace is
  * executing. cx must be a context on the current thread.
  */
 static inline bool
 JS_ON_TRACE(JSContext *cx)
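
For context on how the marking hunks above and the jsgc.cpp hunks below fit together: under this patch a per-compartment GC still walks every compartment's cross-compartment wrapper table (markCrossCompartment), but only the compartment being collected, plus the atoms compartment, has its local roots marked (mark), which now includes tracing the per-compartment empty shapes and setting the marked flag. A rough, self-contained sketch of that control flow follows; the names Compartment, Tracer, and markForCompartmentGC are simplified stand-ins, not the real SpiderMonkey API.

    #include <vector>

    struct Tracer {};

    struct Compartment {
        bool marked = false;

        // Wrappers in *other* compartments may point into the collected one,
        // so every compartment marks its cross-compartment wrapper table.
        void markCrossCompartment(Tracer *trc) { /* mark crossCompartmentWrappers */ }

        // Local roots (the compartment's empty shapes in the real patch) are
        // marked only for the compartment being collected.
        void mark(Tracer *trc) {
            if (marked)
                return;
            marked = true;
            /* trace emptyArgumentsShape, emptyBlockShape, ... */
        }
    };

    // Mirrors the shape of MarkAndSweepCompartment in jsgc.cpp: wrappers are
    // marked everywhere, local roots only in |comp|.
    void markForCompartmentGC(std::vector<Compartment *> &compartments,
                              Compartment *comp, Tracer *trc) {
        for (Compartment *c : compartments)
            c->markCrossCompartment(trc);
        comp->mark(trc);
    }
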
--- a/js/src/jsdbgapi.cpp
+++ b/js/src/jsdbgapi.cpp
@@ -589,16 +589,22 @@ DropWatchPointAndUnlock(JSContext *cx, J
     JSRuntime *rt = cx->runtime;
 
     wp->flags &= ~flag;
     if (wp->flags != 0) {
         DBG_UNLOCK(rt);
         return ok;
     }
 
+    /*
+     * Switch to the same compartment as the watch point, since changeProperty, below,
+     * needs to have a compartment.
+     */
+    SwitchToCompartment sc(cx, wp->object);
+
     /* Remove wp from the list, then restore wp->shape->setter from wp. */
     ++rt->debuggerMutations;
     JS_REMOVE_LINK(&wp->links);
     DBG_UNLOCK(rt);
 
     /*
      * If the property isn't found on wp->object, then someone else must have deleted it,
      * and we don't need to change the property attributes.
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -196,17 +196,17 @@ NewArguments(JSContext *cx, JSObject *pa
     SetValueRangeToUndefined(data->slots, argc);
 
     /* Can't fail from here on, so initialize everything in argsobj. */
     argsobj->init(cx, callee.getFunctionPrivate()->inStrictMode()
                   ? &StrictArgumentsClass
                   : &js_ArgumentsClass,
                   proto, parent, NULL, false);
 
-    argsobj->setMap(cx->runtime->emptyArgumentsShape);
+    argsobj->setMap(cx->compartment->emptyArgumentsShape);
 
     argsobj->setArgsLength(argc);
     argsobj->setArgsData(data);
     data->callee.setObject(callee);
 
     return argsobj;
 }
 
@@ -986,17 +986,17 @@ NewCallObject(JSContext *cx, Bindings *b
 static inline JSObject *
 NewDeclEnvObject(JSContext *cx, JSStackFrame *fp)
 {
     JSObject *envobj = js_NewGCObject(cx, FINALIZE_OBJECT2);
     if (!envobj)
         return NULL;
 
     envobj->init(cx, &js_DeclEnvClass, NULL, &fp->scopeChain(), fp, false);
-    envobj->setMap(cx->runtime->emptyDeclEnvShape);
+    envobj->setMap(cx->compartment->emptyDeclEnvShape);
     return envobj;
 }
 
 JSObject *
 js_GetCallObject(JSContext *cx, JSStackFrame *fp)
 {
     /* Create a call object for fp only if it lacks one. */
     JS_ASSERT(fp->isFunctionFrame());
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -849,17 +849,17 @@ js_FinishGC(JSRuntime *rt)
 #ifdef JS_ARENAMETER
     JS_DumpArenaStats(stdout);
 #endif
 #ifdef JS_GCMETER
     if (JS_WANT_GC_METER_PRINT)
         js_DumpGCStats(rt, stdout);
 #endif
 
-    /* Delete all remaining Compartments. Ideally only the atomsCompartment should be left. */
+    /* Delete all remaining Compartments. */
     for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
         JSCompartment *comp = *c;
         comp->finishArenaLists();
         js_delete(comp);
     }
     rt->compartments.clear();
     rt->atomsCompartment = NULL;
 
@@ -1022,17 +1022,16 @@ JSRuntime::setGCTriggerFactor(uint32 fac
     JS_ASSERT(factor >= 100);
 
     gcTriggerFactor = factor;
     setGCLastBytes(gcLastBytes);
 
     for (JSCompartment **c = compartments.begin(); c != compartments.end(); ++c) {
         (*c)->setGCLastBytes(gcLastBytes);
     }
-    atomsCompartment->setGCLastBytes(gcLastBytes);
 }
 
 void
 JSRuntime::setGCLastBytes(size_t lastBytes)
 {
     gcLastBytes = lastBytes;
 
     /* FIXME bug 603916 - we should unify the triggers here. */
@@ -1629,17 +1628,17 @@ MarkContext(JSTracer *trc, JSContext *ac
         gcr->trace(trc);
 
     if (acx->sharpObjectMap.depth > 0)
         js_TraceSharpMap(trc, &acx->sharpObjectMap);
 
     MarkValue(trc, acx->iterValue, "iterValue");
 
     if (acx->compartment)
-        acx->compartment->marked = true;
+        acx->compartment->mark(trc);
 }
 
 JS_REQUIRES_STACK void
 MarkRuntime(JSTracer *trc)
 {
     JSRuntime *rt = trc->context->runtime;
 
     if (rt->state != JSRTS_LANDING)
@@ -1718,37 +1717,26 @@ MarkRuntime(JSTracer *trc)
 
     js_TraceAtomState(trc);
     js_MarkTraps(trc);
 
     iter = NULL;
     while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
         MarkContext(trc, acx);
 
+    rt->atomsCompartment->mark(trc);
+
 #ifdef JS_TRACER
     for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
         (*c)->traceMonitor.mark(trc);
 #endif
 
     for (ThreadDataIter i(rt); !i.empty(); i.popFront())
         i.threadData()->mark(trc);
 
-    if (rt->emptyArgumentsShape)
-        rt->emptyArgumentsShape->trace(trc);
-    if (rt->emptyBlockShape)
-        rt->emptyBlockShape->trace(trc);
-    if (rt->emptyCallShape)
-        rt->emptyCallShape->trace(trc);
-    if (rt->emptyDeclEnvShape)
-        rt->emptyDeclEnvShape->trace(trc);
-    if (rt->emptyEnumeratorShape)
-        rt->emptyEnumeratorShape->trace(trc);
-    if (rt->emptyWithShape)
-        rt->emptyWithShape->trace(trc);
-
     /*
      * We mark extra roots at the last thing so it can use use additional
      * colors to implement cycle collection.
      */
     if (rt->gcExtraRootsTraceOp)
         rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);
 
 #ifdef DEBUG
@@ -2193,26 +2181,27 @@ SweepCompartments(JSContext *cx, JSGCInv
     JSCompartment **write = read;
     JS_ASSERT(rt->compartments.length() >= 1);
     JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);
 
     while (read < end) {
         JSCompartment *compartment = *read++;
 
         /* Unmarked compartments containing marked objects don't get deleted, except LAST_CONTEXT GC is performed. */
-        if ((!compartment->marked && compartment->arenaListsAreEmpty()) || gckind == GC_LAST_CONTEXT) {
+        if ((!compartment->isMarked() && compartment->arenaListsAreEmpty())
+            || gckind == GC_LAST_CONTEXT)
+        {
             JS_ASSERT(compartment->freeLists.isEmpty());
             if (callback)
                 (void) callback(cx, compartment, JSCOMPARTMENT_DESTROY);
             if (compartment->principals)
                 JSPRINCIPALS_DROP(cx, compartment->principals);
             js_delete(compartment);
             continue;
         }
-        compartment->marked = false;
         *write++ = compartment;
     }
     rt->compartments.resize(write - rt->compartments.begin());
 }
 
 /*
  * Common cache invalidation and so forth that must be done before GC. Even if
  * GCUntilDone calls GC several times, this work needs to be done only once.
@@ -2242,17 +2231,17 @@ PreGCCleanup(JSContext *cx, JSGCInvocati
      * prototypes having readonly or setter properties.
      */
     if (rt->shapeGen & SHAPE_OVERFLOW_BIT
 #ifdef JS_GC_ZEAL
         || rt->gcZeal >= 1
 #endif
         ) {
         rt->gcRegenShapes = true;
-        rt->shapeGen = Shape::LAST_RESERVED_SHAPE;
+        rt->shapeGen = 0;
         rt->protoHazardShape = 0;
     }
 
     if (rt->gcCurrentCompartment) {
         rt->gcCurrentCompartment->purge(cx);
     } else {
         for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
             (*c)->purge(cx);
@@ -2268,31 +2257,35 @@ PreGCCleanup(JSContext *cx, JSGCInvocati
 
 static void
 MarkAndSweepCompartment(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIMER_PARAM)
 {
     JSRuntime *rt = cx->runtime;
     rt->gcNumber++;
     JS_ASSERT(!rt->gcRegenShapes);
     JS_ASSERT(gckind != GC_LAST_CONTEXT);
+    JS_ASSERT(comp != rt->atomsCompartment);
+    JS_ASSERT(!comp->isMarked());
 
     /*
      * Mark phase.
      */
     GCMarker gcmarker(cx);
     JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
     JS_ASSERT(gcmarker.getMarkColor() == BLACK);
     rt->gcMarkingTracer = &gcmarker;
     gcmarker.stackLimit = cx->stackLimit;
 
     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
          r.front()->clearMarkBitmap();
 
     for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
-        (*c)->mark(&gcmarker);
+        (*c)->markCrossCompartment(&gcmarker);
+
+    comp->mark(&gcmarker);
 
     MarkRuntime(&gcmarker);
 
     /*
      * Mark children of things that caused too deep recursion during the above
      * tracing.
      */
     gcmarker.markDelayedChildren();
@@ -2353,29 +2346,33 @@ MarkAndSweepCompartment(JSContext *cx, J
     comp->sweep(cx, 0);
 
     comp->finalizeObjectArenaLists(cx);
     TIMESTAMP(sweepObjectEnd);
 
     comp->finalizeStringArenaLists(cx);
     TIMESTAMP(sweepStringEnd);
 
-    /*
-     * Unmark the runtime's property trees because we don't
-     * sweep them.
-     */
-    js::PropertyTree::unmarkShapes(cx);
+#ifdef DEBUG
+    /* Make sure that we didn't mark a Shape in another compartment. */
+    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
+        JS_ASSERT_IF(*c != comp, (*c)->propertyTree.checkShapesAllUnmarked(cx));
+
+    PropertyTree::dumpShapes(cx);
+#endif
 
     /*
      * Destroy arenas after we finished the sweeping so finalizers can safely
      * use js_IsAboutToBeFinalized().
      */
     ExpireGCChunks(rt);
     TIMESTAMP(sweepDestroyEnd);
 
+    comp->clearMark();
+
     if (rt->gcCallback)
         (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
 }
 
 /*
  * Perform mark-and-sweep GC.
  *
  * In a JS_THREADSAFE build, the calling thread must be rt->gcThread and each
@@ -2464,39 +2461,48 @@ MarkAndSweep(JSContext *cx, JSGCInvocati
 
     TIMESTAMP(sweepObjectEnd);
 
     for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++)
         (*c)->finalizeStringArenaLists(cx);
 
     TIMESTAMP(sweepStringEnd);
 
-    SweepCompartments(cx, gckind);
-
     /*
      * Sweep the runtime's property trees after finalizing objects, in case any
      * had watchpoints referencing tree nodes.
+     *
+     * Do this before sweeping compartments, so that we sweep all shapes in
+     * unreachable compartments.
      */
-    js::PropertyTree::sweepShapes(cx);
+    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
+        (*c)->propertyTree.sweepShapes(cx);
+
+    PropertyTree::dumpShapes(cx);
+
+    SweepCompartments(cx, gckind);
 
     /*
      * Sweep script filenames after sweeping functions in the generic loop
      * above. In this way when a scripted function's finalizer destroys the
      * script and calls rt->destroyScriptHook, the hook can still access the
      * script's filename. See bug 323267.
      */
     js_SweepScriptFilenames(rt);
 
     /*
      * Destroy arenas after we finished the sweeping so finalizers can safely
      * use js_IsAboutToBeFinalized().
      */
     ExpireGCChunks(rt);
     TIMESTAMP(sweepDestroyEnd);
 
+    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
+        (*c)->clearMark();
+
     if (rt->gcCallback)
         (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
 #ifdef DEBUG_srcnotesize
   { extern void DumpSrcNoteSizeHist();
     DumpSrcNoteSizeHist();
     printf("GC HEAP SIZE %lu\n", (unsigned long)rt->gcBytes);
   }
 #endif
@@ -2697,16 +2703,25 @@ GCUntilDone(JSContext *cx, JSCompartment
             LetOtherGCFinish(cx);
         }
 #endif
         return;
     }
 
     AutoGCSession gcsession(cx);
 
+    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
+        JS_ASSERT(!(*c)->isMarked());
+
+    /*
+     * We should not be depending on cx->compartment in the GC, so set it to
+     * NULL to look for violations.
+     */
+    SwitchToCompartment(cx, (JSCompartment *)NULL);
+    
     JS_ASSERT(!rt->gcCurrentCompartment);
     rt->gcCurrentCompartment = comp;
 
     METER(rt->gcStats.poke++);
 
     bool firstRun = true;
     rt->gcMarkAndSweep = true;
 #ifdef JS_THREADSAFE
@@ -2739,20 +2754,18 @@ GCUntilDone(JSContext *cx, JSCompartment
     rt->gcHelperThread.startBackgroundSweep(rt);
 #endif
 
     rt->gcMarkAndSweep = false;
     rt->gcRegenShapes = false;
     rt->setGCLastBytes(rt->gcBytes);
     rt->gcCurrentCompartment = NULL;
 
-    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
+    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
         (*c)->setGCLastBytes((*c)->gcBytes);
-        (*c)->marked = false;
-    }
 }
 
 void
 js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind)
 {
     JSRuntime *rt = cx->runtime;
 
     /*
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -422,17 +422,17 @@ NewIteratorObject(JSContext *cx, uintN f
          * are not stillborn, with the exception of "NoSuchMethod" internal
          * helper objects) expect it to have a non-null map pointer, so we
          * share an empty Enumerator scope in the runtime.
          */
         JSObject *obj = js_NewGCObject(cx, FINALIZE_OBJECT0);
         if (!obj)
             return false;
         obj->init(cx, &js_IteratorClass, NULL, NULL, NULL, false);
-        obj->setMap(cx->runtime->emptyEnumeratorShape);
+        obj->setMap(cx->compartment->emptyEnumeratorShape);
         return obj;
     }
 
     return NewBuiltinClassInstance(cx, &js_IteratorClass);
 }
 
 NativeIterator *
 NativeIterator::allocateIterator(JSContext *cx, uint32 slength, const AutoIdVector &props)
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -3249,17 +3249,17 @@ js_NewWithObject(JSContext *cx, JSObject
 
     obj = js_NewGCObject(cx, FINALIZE_OBJECT2);
     if (!obj)
         return NULL;
 
     JSStackFrame *priv = js_FloatingFrameIfGenerator(cx, cx->fp());
 
     obj->init(cx, &js_WithClass, proto, parent, priv, false);
-    obj->setMap(cx->runtime->emptyWithShape);
+    obj->setMap(cx->compartment->emptyWithShape);
     OBJ_SET_BLOCK_DEPTH(cx, obj, depth);
 
     AutoObjectRooter tvr(cx, obj);
     JSObject *thisp = proto->thisObject(cx);
     if (!thisp)
         return NULL;
 
     assertSameCompartment(cx, obj, thisp);
@@ -3275,17 +3275,17 @@ js_NewBlockObject(JSContext *cx)
      * Null obj's proto slot so that Object.prototype.* does not pollute block
      * scopes and to give the block object its own scope.
      */
     JSObject *blockObj = js_NewGCObject(cx, FINALIZE_OBJECT2);
     if (!blockObj)
         return NULL;
 
     blockObj->init(cx, &js_BlockClass, NULL, NULL, NULL, false);
-    blockObj->setMap(cx->runtime->emptyBlockShape);
+    blockObj->setMap(cx->compartment->emptyBlockShape);
     return blockObj;
 }
 
 JSObject *
 js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp)
 {
     JS_ASSERT(proto->isStaticBlock());
 
@@ -4724,17 +4724,17 @@ js_DefineNativeProperty(JSContext *cx, J
         js_PurgeScopeChain(cx, obj, id);
 
     /*
      * Check whether a readonly property or setter is being defined on a known
      * prototype object. See the comment in jscntxt.h before protoHazardShape's
      * member declaration.
      */
     if (obj->isDelegate() && (attrs & (JSPROP_READONLY | JSPROP_SETTER)))
-        cx->runtime->protoHazardShape = js_GenerateShape(cx, false);
+        cx->runtime->protoHazardShape = js_GenerateShape(cx);
 
     /* Use the object's class getter and setter by default. */
     Class *clasp = obj->getClass();
     if (!(defineHow & JSDNP_SET_METHOD)) {
         if (!getter && !(attrs & JSPROP_GETTER))
             getter = clasp->getProperty;
         if (!setter && !(attrs & JSPROP_SETTER))
             setter = clasp->setProperty;
@@ -6533,17 +6533,17 @@ js_TraceObject(JSTracer *trc, JSObject *
     if (clasp->mark) {
         if (clasp->flags & JSCLASS_MARK_IS_TRACE)
             ((JSTraceOp) clasp->mark)(trc, obj);
         else if (IS_GC_MARKING_TRACER(trc))
             (void) clasp->mark(cx, obj, trc);
     }
     if (clasp->flags & JSCLASS_IS_GLOBAL) {
         JSCompartment *compartment = obj->getCompartment();
-        compartment->marked = true;
+        compartment->mark(trc);
     }
 
     /*
      * NB: clasp->mark could mutate something (which would be a bug, but we are
      * defensive), so don't hoist this above calling clasp->mark.
      */
     uint32 nslots = Min(obj->numSlots(), obj->slotSpan());
     for (uint32 i = 0; i != nslots; ++i) {
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -311,16 +311,20 @@ struct JSObject : js::gc::Cell {
     friend class GetPropCompiler;
 
     /*
      * Private pointer to the last added property and methods to manipulate the
      * list it links among properties in this scope. The {remove,insert} pair
      * for DictionaryProperties assert that the scope is in dictionary mode and
      * any reachable properties are flagged as dictionary properties.
      *
+     * For native objects, this field is always a Shape. For non-native objects,
+     * it points to the singleton sharedNonNative JSObjectMap, whose shape field
+     * is SHAPELESS.
+     *
      * NB: these private methods do *not* update this scope's shape to track
      * lastProp->shape after they finish updating the linked list in the case
      * where lastProp is updated. It is up to calling code in jsscope.cpp to
      * call updateShape(cx) after updating lastProp.
      */
     union {
         js::Shape       *lastProp;
         JSObjectMap     *map;
--- a/js/src/jspropertytree.cpp
+++ b/js/src/jspropertytree.cpp
@@ -74,416 +74,203 @@ PropertyTree::init()
 }
 
 void
 PropertyTree::finish()
 {
     JS_FinishArenaPool(&arenaPool);
 }
 
-/*
- * NB: Called with cx->runtime->gcLock held if gcLocked is true.
- * On failure, return null after unlocking the GC and reporting out of memory.
- */
+/* On failure, returns NULL. Does not report out of memory. */
 Shape *
-PropertyTree::newShape(JSContext *cx, bool gcLocked)
+PropertyTree::newShapeUnchecked()
 {
     Shape *shape;
 
-    if (!gcLocked)
-        JS_LOCK_GC(cx->runtime);
     shape = freeList;
     if (shape) {
         shape->removeFree();
     } else {
         JS_ARENA_ALLOCATE_CAST(shape, Shape *, &arenaPool, sizeof(Shape));
-        if (!shape) {
-            JS_UNLOCK_GC(cx->runtime);
-            JS_ReportOutOfMemory(cx);
+        if (!shape)
             return NULL;
-        }
     }
-    if (!gcLocked)
-        JS_UNLOCK_GC(cx->runtime);
+
+#ifdef DEBUG
+    shape->compartment = compartment;
+#endif
 
-    JS_RUNTIME_METER(cx->runtime, livePropTreeNodes);
-    JS_RUNTIME_METER(cx->runtime, totalPropTreeNodes);
+    JS_COMPARTMENT_METER(compartment->livePropTreeNodes++);
+    JS_COMPARTMENT_METER(compartment->totalPropTreeNodes++);
+    return shape;
+}
+
+Shape *
+PropertyTree::newShape(JSContext *cx)
+{
+    Shape *shape = newShapeUnchecked();
+    if (!shape)
+        JS_ReportOutOfMemory(cx);
     return shape;
 }
 
-/*
- * NB: Called with cx->runtime->gcLock held, always.
- * On failure, return null after unlocking the GC and reporting out of memory.
- */
-KidsChunk *
-KidsChunk::create(JSContext *cx)
+static KidsHash *
+HashChildren(Shape *kid1, Shape *kid2)
 {
-    KidsChunk *chunk;
+    void *mem = js_malloc(sizeof(KidsHash));
+    if (!mem)
+        return NULL;
 
-    chunk = (KidsChunk *) js_calloc(sizeof *chunk);
-    if (!chunk) {
-        JS_UNLOCK_GC(cx->runtime);
-        JS_ReportOutOfMemory(cx);
+    KidsHash *hash = new (mem) KidsHash();
+    if (!hash->init(2)) {
+        js_free(hash);
         return NULL;
     }
-    JS_RUNTIME_METER(cx->runtime, propTreeKidsChunks);
-    return chunk;
+
+    KidsHash::AddPtr addPtr = hash->lookupForAdd(kid1);
+    JS_ALWAYS_TRUE(hash->add(addPtr, kid1));
+
+    addPtr = hash->lookupForAdd(kid2);
+    JS_ASSERT(!addPtr.found());
+    JS_ALWAYS_TRUE(hash->add(addPtr, kid2));
+
+    return hash;
 }
 
-KidsChunk *
-KidsChunk::destroy(JSContext *cx, KidsChunk *chunk)
-{
-    JS_RUNTIME_UNMETER(cx->runtime, propTreeKidsChunks);
-
-    KidsChunk *nextChunk = chunk->next;
-    js_free(chunk);
-    return nextChunk;
-}
-
-/*
- * NB: Called with cx->runtime->gcLock held, always.
- * On failure, return false after unlocking the GC and reporting out of memory.
- */
 bool
 PropertyTree::insertChild(JSContext *cx, Shape *parent, Shape *child)
 {
     JS_ASSERT(!parent->inDictionary());
     JS_ASSERT(!child->parent);
     JS_ASSERT(!child->inDictionary());
     JS_ASSERT(!JSID_IS_VOID(parent->id));
     JS_ASSERT(!JSID_IS_VOID(child->id));
-
-    child->setParent(parent);
+    JS_ASSERT(cx->compartment == compartment);
+    JS_ASSERT(child->compartment == parent->compartment);
 
     KidsPointer *kidp = &parent->kids;
+
     if (kidp->isNull()) {
+        child->setParent(parent);
         kidp->setShape(child);
         return true;
     }
 
-    Shape *shape;
-
     if (kidp->isShape()) {
-        shape = kidp->toShape();
+        Shape *shape = kidp->toShape();
         JS_ASSERT(shape != child);
-        if (shape->matches(child)) {
-            /*
-             * Duplicate child created while racing to getChild on the same
-             * node label. See PropertyTree::getChild, further below.
-             */
-            JS_RUNTIME_METER(cx->runtime, duplicatePropTreeNodes);
+        JS_ASSERT(!shape->matches(child));
+
+        KidsHash *hash = HashChildren(shape, child);
+        if (!hash) {
+            JS_ReportOutOfMemory(cx);
+            return false;
         }
-
-        KidsChunk *chunk = KidsChunk::create(cx);
-        if (!chunk)
-            return false;
-        parent->kids.setChunk(chunk);
-        chunk->kids[0] = shape;
-        chunk->kids[1] = child;
+        kidp->setHash(hash);
+        child->setParent(parent);
         return true;
     }
 
-    if (kidp->isChunk()) {
-        KidsChunk **chunkp;
-        KidsChunk *chunk = kidp->toChunk();
-
-        do {
-            for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
-                shape = chunk->kids[i];
-                if (!shape) {
-                    chunk->kids[i] = child;
-                    return true;
-                }
-
-                JS_ASSERT(shape != child);
-                if (shape->matches(child)) {
-                    /*
-                     * Duplicate child, see comment above. In this case, we
-                     * must let the duplicate be inserted at this level in the
-                     * tree, so we keep iterating, looking for an empty slot in
-                     * which to insert.
-                     */
-                    JS_ASSERT(shape != child);
-                    JS_RUNTIME_METER(cx->runtime, duplicatePropTreeNodes);
-                }
-            }
-            chunkp = &chunk->next;
-        } while ((chunk = *chunkp) != NULL);
-
-        chunk = KidsChunk::create(cx);
-        if (!chunk)
-            return false;
-        *chunkp = chunk;
-        chunk->kids[0] = child;
-        return true;
-    }
-   
     KidsHash *hash = kidp->toHash();
     KidsHash::AddPtr addPtr = hash->lookupForAdd(child);
-    if (!addPtr) {
-        if (!hash->add(addPtr, child)) {
-            JS_UNLOCK_GC(cx->runtime);
-            JS_ReportOutOfMemory(cx);
-            return false;
-        }
-    } else {
-        // FIXME ignore duplicate child case here, going thread-local soon!
+    JS_ASSERT(!addPtr.found());
+    if (!hash->add(addPtr, child)) {
+        JS_ReportOutOfMemory(cx);
+        return false;
     }
+    child->setParent(parent);
     return true;
 }
 
-/* NB: Called with cx->runtime->gcLock held. */
 void
-PropertyTree::removeChild(JSContext *cx, Shape *child)
+PropertyTree::removeChild(Shape *child)
 {
     JS_ASSERT(!child->inDictionary());
 
     Shape *parent = child->parent;
     JS_ASSERT(parent);
     JS_ASSERT(!JSID_IS_VOID(parent->id));
 
     KidsPointer *kidp = &parent->kids;
     if (kidp->isShape()) {
         Shape *kid = kidp->toShape();
         if (kid == child)
             parent->kids.setNull();
         return;
     }
 
-    if (kidp->isChunk()) {
-        KidsChunk *list = kidp->toChunk();
-        KidsChunk *chunk = list;
-        KidsChunk **chunkp = &list;
-
-        do {
-            for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
-                if (chunk->kids[i] == child) {
-                    KidsChunk *lastChunk = chunk;
-
-                    uintN j;
-                    if (!lastChunk->next) {
-                        j = i + 1;
-                    } else {
-                        j = 0;
-                        do {
-                            chunkp = &lastChunk->next;
-                            lastChunk = *chunkp;
-                        } while (lastChunk->next);
-                    }
-                    for (; j < MAX_KIDS_PER_CHUNK; j++) {
-                        if (!lastChunk->kids[j])
-                            break;
-                    }
-                    --j;
-
-                    if (chunk != lastChunk || j > i)
-                        chunk->kids[i] = lastChunk->kids[j];
-                    lastChunk->kids[j] = NULL;
-                    if (j == 0) {
-                        *chunkp = NULL;
-                        if (!list)
-                            parent->kids.setNull();
-                        KidsChunk::destroy(cx, lastChunk);
-                    }
-                    return;
-                }
-            }
-
-            chunkp = &chunk->next;
-        } while ((chunk = *chunkp) != NULL);
-        return;
-    }
-
     kidp->toHash()->remove(child);
 }
 
-static KidsHash *
-HashChunks(KidsChunk *chunk, uintN n)
-{
-    void *mem = js_malloc(sizeof(KidsHash));
-    if (!mem)
-        return NULL;
-
-    KidsHash *hash = new (mem) KidsHash();
-    if (!hash->init(n)) {
-        js_free(hash);
-        return NULL;
-    }
-
-    do {
-        for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
-            Shape *shape = chunk->kids[i];
-            if (!shape)
-                break;
-            KidsHash::AddPtr addPtr = hash->lookupForAdd(shape);
-            if (!addPtr) {
-                /*
-                 * Infallible, we right-sized via hash->init(n) just above.
-                 * Assert just in case jshashtable.h ever regresses.
-                 */
-                JS_ALWAYS_TRUE(hash->add(addPtr, shape));
-            } else {
-                /*
-                 * Duplicate child case, we don't handle this race,
-                 * multi-threaded shapes are going away...
-                 */
-            }
-        }
-    } while ((chunk = chunk->next) != NULL);
-    return hash;
-}
-
-/*
- * Called without cx->runtime->gcLock held. This function acquires that lock
- * only when inserting a new child.  Thus there may be races to find or add a
- * node that result in duplicates.  We expect such races to be rare!
- *
- * We use cx->runtime->gcLock, not ...->rtLock, to avoid nesting the former
- * inside the latter in js_GenerateShape below.
- */
 Shape *
 PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child)
 {
     Shape *shape;
 
     JS_ASSERT(parent);
     JS_ASSERT(!JSID_IS_VOID(parent->id));
 
     /*
-     * Because chunks are appended at the end and never deleted except by
-     * the GC, we can search without taking the runtime's GC lock.  We may
-     * miss a matching shape added by another thread, and make a duplicate
-     * one, but that is an unlikely, therefore small, cost.  The property
-     * tree has extremely low fan-out below its root in popular embeddings
-     * with real-world workloads.
-     *
-     * Patterns such as defining closures that capture a constructor's
-     * environment as getters or setters on the new object that is passed
-     * in as |this| can significantly increase fan-out below the property
+     * The property tree has extremely low fan-out below its root in
+     * popular embeddings with real-world workloads. Patterns such as
+     * defining closures that capture a constructor's environment as
+     * getters or setters on the new object that is passed in as
+     * |this| can significantly increase fan-out below the property
      * tree root -- see bug 335700 for details.
      */
     KidsPointer *kidp = &parent->kids;
-    if (!kidp->isNull()) {
-        if (kidp->isShape()) {
-            shape = kidp->toShape();
-            if (shape->matches(&child))
-                return shape;
-        } else if (kidp->isChunk()) {
-            KidsChunk *chunk = kidp->toChunk();
-
-            uintN n = 0;
-            do {
-                for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
-                    shape = chunk->kids[i];
-                    if (!shape) {
-                        n += i;
-                        if (n >= CHUNK_HASH_THRESHOLD) {
-                            /*
-                             * kidp->isChunk() was true, but if we're racing it
-                             * may not be by this point. FIXME: thread "safety"
-                             * is for the birds!
-                             */
-                            if (!kidp->isHash()) {
-                                chunk = kidp->toChunk();
-
-                                KidsHash *hash = HashChunks(chunk, n);
-                                if (!hash) {
-                                    JS_ReportOutOfMemory(cx);
-                                    return NULL;
-                                }
-
-                                JS_LOCK_GC(cx->runtime);
-                                if (kidp->isHash()) {
-                                    hash->~KidsHash();
-                                    js_free(hash);
-                                } else {
-                                    // FIXME unsafe race with kidp->is/toChunk() above.
-                                    // But this is all going single-threaded soon...
-                                    while (chunk)
-                                        chunk = KidsChunk::destroy(cx, chunk);
-                                    kidp->setHash(hash);
-                                }
-                                goto locked_not_found;
-                            }
-                        }
-                        goto not_found;
-                    }
-
-                    if (shape->matches(&child))
-                        return shape;
-                }
-                n += MAX_KIDS_PER_CHUNK;
-            } while ((chunk = chunk->next) != NULL);
-        } else {
-            JS_LOCK_GC(cx->runtime);
-            shape = *kidp->toHash()->lookup(&child);
-            if (shape)
-                goto out;
-            goto locked_not_found;
-        }
+    if (kidp->isShape()) {
+        shape = kidp->toShape();
+        if (shape->matches(&child))
+            return shape;
+    } else if (kidp->isHash()) {
+        shape = *kidp->toHash()->lookup(&child);
+        if (shape)
+            return shape;
+    } else {
+        /* If kidp->isNull(), we always insert. */
     }
 
-  not_found:
-    JS_LOCK_GC(cx->runtime);
-
-  locked_not_found:
-    shape = newShape(cx, true);
+    shape = newShape(cx);
     if (!shape)
         return NULL;
 
     new (shape) Shape(child.id, child.rawGetter, child.rawSetter, child.slot, child.attrs,
-                      child.flags, child.shortid, js_GenerateShape(cx, true));
+                      child.flags, child.shortid, js_GenerateShape(cx));
 
     if (!insertChild(cx, parent, shape))
         return NULL;
 
-  out:
-    JS_UNLOCK_GC(cx->runtime);
     return shape;
 }
 
 #ifdef DEBUG
 
 void
 KidsPointer::checkConsistency(const Shape *aKid) const
 {
     if (isShape()) {
         JS_ASSERT(toShape() == aKid);
-    } else if (isChunk()) {
-        bool found = false;
-        for (KidsChunk *chunk = toChunk(); chunk; chunk = chunk->next) {
-            for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
-                if (!chunk->kids[i]) {
-                    JS_ASSERT(!chunk->next);
-                    for (uintN j = i + 1; j < MAX_KIDS_PER_CHUNK; j++)
-                        JS_ASSERT(!chunk->kids[j]);
-                    break;
-                }
-                if (chunk->kids[i] == aKid) {
-                    JS_ASSERT(!found);
-                    found = true;
-                }
-            }
-        }
-        JS_ASSERT(found);
     } else {
         JS_ASSERT(isHash());
         KidsHash *hash = toHash();
         KidsHash::Ptr ptr = hash->lookup(aKid);
         JS_ASSERT(*ptr == aKid);
     }
 }
 
 void
 Shape::dump(JSContext *cx, FILE *fp) const
 {
     JS_ASSERT(!JSID_IS_VOID(id));
 
     if (JSID_IS_INT(id)) {
         fprintf(fp, "[%ld]", (long) JSID_TO_INT(id));
+    } else if (JSID_IS_DEFAULT_XML_NAMESPACE(id)) {
+        fprintf(fp, "<default XML namespace>");
     } else {
         JSLinearString *str;
         if (JSID_IS_ATOM(id)) {
             str = JSID_TO_ATOM(id);
         } else {
             JS_ASSERT(JSID_IS_OBJECT(id));
             JSString *s = js_ValueToString(cx, IdToValue(id));
             fputs("object ", fp);
@@ -536,39 +323,27 @@ MeterKidCount(JSBasicStats *bs, uintN nk
 {
     JS_BASIC_STATS_ACCUM(bs, nkids);
 }
 
 void
 js::PropertyTree::meter(JSBasicStats *bs, Shape *node)
 {
     uintN nkids = 0;
-    const KidsPointer &kids = node->kids;
-    if (!kids.isNull()) {
-        if (kids.isShape()) {
-            meter(bs, kids.toShape());
-            nkids = 1;
-        } else if (kids.isChunk()) {
-            for (KidsChunk *chunk = kids.toChunk(); chunk; chunk = chunk->next) {
-                for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
-                    Shape *kid = chunk->kids[i];
-                    if (!kid)
-                        break;
-                    meter(bs, kid);
-                    nkids++;
-                }
-            }
-        } else {
-            const KidsHash &hash = *kids.toHash();
-            for (KidsHash::Range range = hash.all(); !range.empty(); range.popFront()) {
-                Shape *kid = range.front();
-
-                meter(bs, kid);
-                nkids++;
-            }
+    const KidsPointer &kidp = node->kids;
+    if (kidp.isShape()) {
+        meter(bs, kidp.toShape());
+        nkids = 1;
+    } else if (kidp.isHash()) {
+        const KidsHash &hash = *kidp.toHash();
+        for (KidsHash::Range range = hash.all(); !range.empty(); range.popFront()) {
+            Shape *kid = range.front();
+            
+            meter(bs, kid);
+            nkids++;
         }
     }
 
     MeterKidCount(bs, nkids);
 }
 
 void
 Shape::dumpSubtree(JSContext *cx, int level, FILE *fp) const
@@ -583,78 +358,44 @@ Shape::dumpSubtree(JSContext *cx, int le
     }
 
     if (!kids.isNull()) {
         ++level;
         if (kids.isShape()) {
             Shape *kid = kids.toShape();
             JS_ASSERT(kid->parent == this);
             kid->dumpSubtree(cx, level, fp);
-        } else if (kids.isChunk()) {
-            KidsChunk *chunk = kids.toChunk();
-            do {
-                for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
-                    Shape *kid = chunk->kids[i];
-                    if (!kid)
-                        break;
-                    JS_ASSERT(kid->parent == this);
-                    kid->dumpSubtree(cx, level, fp);
-                }
-            } while ((chunk = chunk->next) != NULL);
         } else {
             const KidsHash &hash = *kids.toHash();
             for (KidsHash::Range range = hash.all(); !range.empty(); range.popFront()) {
                 Shape *kid = range.front();
 
                 JS_ASSERT(kid->parent == this);
                 kid->dumpSubtree(cx, level, fp);
             }
         }
     }
 }
 
 #endif /* DEBUG */
 
 JS_ALWAYS_INLINE void
-js::PropertyTree::orphanKids(JSContext *cx, Shape *shape)
+js::PropertyTree::orphanChildren(Shape *shape)
 {
     KidsPointer *kidp = &shape->kids;
 
     JS_ASSERT(!kidp->isNull());
 
-    /*
-     * Note that JS_PROPERTY_TREE(cx).removeChild(cx, shape) precedes the call
-     * to orphanKids in sweepShapes, below. Therefore the grandparent must have
-     * either no kids left, or else space in chunks or a hash for more than one
-     * kid.
-     */
-    JS_ASSERT_IF(shape->parent, !shape->parent->kids.isShape());
-
     if (kidp->isShape()) {
         Shape *kid = kidp->toShape();
 
         if (!JSID_IS_VOID(kid->id)) {
             JS_ASSERT(kid->parent == shape);
             kid->parent = NULL;
         }
-    } else if (kidp->isChunk()) {
-        KidsChunk *chunk = kidp->toChunk();
-
-        do {
-            for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
-                Shape *kid = chunk->kids[i];
-                if (!kid)
-                    break;
-
-                if (!JSID_IS_VOID(kid->id)) {
-                    JS_ASSERT(kid->parent == shape);
-                    kid->parent = NULL;
-                }
-            }
-        } while ((chunk = KidsChunk::destroy(cx, chunk)) != NULL);
     } else {
         KidsHash *hash = kidp->toHash();
 
         for (KidsHash::Range range = hash->all(); !range.empty(); range.popFront()) {
             Shape *kid = range.front();
             if (!JSID_IS_VOID(kid->id)) {
                 JS_ASSERT(kid->parent == shape);
                 kid->parent = NULL;
@@ -666,42 +407,44 @@ js::PropertyTree::orphanKids(JSContext *
     }
 
     kidp->setNull();
 }
 
 void
 js::PropertyTree::sweepShapes(JSContext *cx)
 {
+    JSRuntime *rt = compartment->rt;
+
 #ifdef DEBUG
     JSBasicStats bs;
     uint32 livePropCapacity = 0, totalLiveCount = 0;
     static FILE *logfp;
     if (!logfp) {
-        if (const char *filename = cx->runtime->propTreeStatFilename)
+        if (const char *filename = rt->propTreeStatFilename)
             logfp = fopen(filename, "w");
     }
 
     if (logfp) {
         JS_BASIC_STATS_INIT(&bs);
 
         uint32 empties;
         {
-            typedef JSRuntime::EmptyShapeSet HS;
+            typedef JSCompartment::EmptyShapeSet HS;
 
-            HS &h = cx->runtime->emptyShapes;
+            HS &h = compartment->emptyShapes;
             empties = h.count();
             MeterKidCount(&bs, empties);
             for (HS::Range r = h.all(); !r.empty(); r.popFront())
                 meter(&bs, r.front());
         }
 
-        double props = cx->runtime->liveObjectPropsPreSweep;
-        double nodes = cx->runtime->livePropTreeNodes;
-        double dicts = cx->runtime->liveDictModeNodes;
+        double props = rt->liveObjectPropsPreSweep;
+        double nodes = compartment->livePropTreeNodes;
+        double dicts = compartment->liveDictModeNodes;
 
         /* Empty scope nodes are never hashed, so subtract them from nodes. */
         JS_ASSERT(nodes - dicts == bs.sum);
         nodes -= empties;
 
         double sigma;
         double mean = JS_MeanAndStdDevBS(&bs, &sigma);
 
@@ -713,17 +456,17 @@ js::PropertyTree::sweepShapes(JSContext 
     }
 #endif
 
     /*
      * Sweep the heap clean of all unmarked nodes. Here we will find nodes
      * already GC'ed from the root ply, but we will avoid re-orphaning their
      * kids, because the kids member will already be null.
      */
-    JSArena **ap = &JS_PROPERTY_TREE(cx).arenaPool.first.next;
+    JSArena **ap = &arenaPool.first.next;
     while (JSArena *a = *ap) {
         Shape *limit = (Shape *) a->avail;
         uintN liveCount = 0;
 
         for (Shape *shape = (Shape *) a->base; shape < limit; shape++) {
             /* If the id is null, shape is already on the freelist. */
             if (JSID_IS_VOID(shape->id))
                 continue;
@@ -733,184 +476,204 @@ js::PropertyTree::sweepShapes(JSContext 
              * and continue the while loop.
              *
              * Regenerate shape->shape if it hasn't already been refreshed
              * during the mark phase, when live scopes' lastProp members are
              * followed to update both scope->shape and lastProp->shape.
              */
             if (shape->marked()) {
                 shape->clearMark();
-                if (cx->runtime->gcRegenShapes) {
+                if (rt->gcRegenShapes) {
                     if (shape->hasRegenFlag())
                         shape->clearRegenFlag();
                     else
-                        shape->shape = js_RegenerateShapeForGC(cx);
+                        shape->shape = js_RegenerateShapeForGC(rt);
                 }
                 liveCount++;
                 continue;
             }
 
 #ifdef DEBUG
             if ((shape->flags & Shape::SHARED_EMPTY) &&
-                cx->runtime->meterEmptyShapes()) {
-                cx->runtime->emptyShapes.remove((EmptyShape *) shape);
+                rt->meterEmptyShapes()) {
+                compartment->emptyShapes.remove((EmptyShape *) shape);
             }
 #endif
 
             if (shape->inDictionary()) {
-                JS_RUNTIME_UNMETER(cx->runtime, liveDictModeNodes);
+                JS_COMPARTMENT_METER(compartment->liveDictModeNodes--);
             } else {
                 /*
                  * Here, shape is garbage to collect, but its parent might not
-                 * be, so we may have to remove it from its parent's kids hash,
-                 * chunk list, or kid singleton pointer set.
+                 * be, so we may have to remove it from its parent's kids hash
+                 * or kid singleton pointer set.
                  *
                  * Without a separate mark-clearing pass, we can't tell whether
                  * shape->parent is live at this point, so we must remove shape
                  * if its parent member is non-null. A saving grace: if shape's
                  * parent is dead and swept by this point, shape->parent will
                  * be null -- in the next paragraph, we null all of a property
                  * tree node's kids' parent links when sweeping that node.
                  */
                 if (shape->parent)
-                    JS_PROPERTY_TREE(cx).removeChild(cx, shape);
+                    removeChild(shape);
 
                 if (!shape->kids.isNull())
-                    orphanKids(cx, shape);
+                    orphanChildren(shape);
             }
 
             /*
              * Note that Shape::insertFree nulls shape->id so we know that
              * shape is on the freelist.
              */
             shape->freeTable(cx);
-            shape->insertFree(&JS_PROPERTY_TREE(cx).freeList);
-            JS_RUNTIME_UNMETER(cx->runtime, livePropTreeNodes);
+            shape->insertFree(&freeList);
+            JS_COMPARTMENT_METER(compartment->livePropTreeNodes--);
         }
 
         /* If a contains no live properties, return it to the malloc heap. */
         if (liveCount == 0) {
             for (Shape *shape = (Shape *) a->base; shape < limit; shape++)
                 shape->removeFree();
-            JS_ARENA_DESTROY(&JS_PROPERTY_TREE(cx).arenaPool, a, ap);
+            JS_ARENA_DESTROY(&arenaPool, a, ap);
         } else {
 #ifdef DEBUG
             livePropCapacity += limit - (Shape *) a->base;
             totalLiveCount += liveCount;
 #endif
             ap = &a->next;
         }
     }
 
 #ifdef DEBUG
     if (logfp) {
         fprintf(logfp,
                 "\nProperty tree stats for gcNumber %lu\n",
-                (unsigned long) cx->runtime->gcNumber);
+                (unsigned long) rt->gcNumber);
 
         fprintf(logfp, "arenautil %g%%\n",
                 (totalLiveCount && livePropCapacity)
                 ? (totalLiveCount * 100.0) / livePropCapacity
                 : 0.0);
 
 #define RATE(f1, f2) (((double)js_scope_stats.f1 / js_scope_stats.f2) * 100.0)
 
-        fprintf(logfp,
-                "Scope search stats:\n"
-                "  searches:        %6u\n"
-                "  hits:            %6u %5.2f%% of searches\n"
-                "  misses:          %6u %5.2f%%\n"
-                "  hashes:          %6u %5.2f%%\n"
-                "  hashHits:        %6u %5.2f%% (%5.2f%% of hashes)\n"
-                "  hashMisses:      %6u %5.2f%% (%5.2f%%)\n"
-                "  steps:           %6u %5.2f%% (%5.2f%%)\n"
-                "  stepHits:        %6u %5.2f%% (%5.2f%%)\n"
-                "  stepMisses:      %6u %5.2f%% (%5.2f%%)\n"
-                "  initSearches:    %6u\n"
-                "  changeSearches:  %6u\n"
-                "  tableAllocFails: %6u\n"
-                "  toDictFails:     %6u\n"
-                "  wrapWatchFails:  %6u\n"
-                "  adds:            %6u\n"
-                "  addFails:        %6u\n"
-                "  puts:            %6u\n"
-                "  redundantPuts:   %6u\n"
-                "  putFails:        %6u\n"
-                "  changes:         %6u\n"
-                "  changeFails:     %6u\n"
-                "  compresses:      %6u\n"
-                "  grows:           %6u\n"
-                "  removes:         %6u\n"
-                "  removeFrees:     %6u\n"
-                "  uselessRemoves:  %6u\n"
-                "  shrinks:         %6u\n",
-                js_scope_stats.searches,
-                js_scope_stats.hits, RATE(hits, searches),
-                js_scope_stats.misses, RATE(misses, searches),
-                js_scope_stats.hashes, RATE(hashes, searches),
-                js_scope_stats.hashHits, RATE(hashHits, searches), RATE(hashHits, hashes),
-                js_scope_stats.hashMisses, RATE(hashMisses, searches), RATE(hashMisses, hashes),
-                js_scope_stats.steps, RATE(steps, searches), RATE(steps, hashes),
-                js_scope_stats.stepHits, RATE(stepHits, searches), RATE(stepHits, hashes),
-                js_scope_stats.stepMisses, RATE(stepMisses, searches), RATE(stepMisses, hashes),
-                js_scope_stats.initSearches,
-                js_scope_stats.changeSearches,
-                js_scope_stats.tableAllocFails,
-                js_scope_stats.toDictFails,
-                js_scope_stats.wrapWatchFails,
-                js_scope_stats.adds,
-                js_scope_stats.addFails,
-                js_scope_stats.puts,
-                js_scope_stats.redundantPuts,
-                js_scope_stats.putFails,
-                js_scope_stats.changes,
-                js_scope_stats.changeFails,
-                js_scope_stats.compresses,
-                js_scope_stats.grows,
-                js_scope_stats.removes,
-                js_scope_stats.removeFrees,
-                js_scope_stats.uselessRemoves,
-                js_scope_stats.shrinks);
+        /* This data is global, so only print it once per GC. */
+        if (compartment == rt->atomsCompartment) {
+            fprintf(logfp,
+                    "Scope search stats:\n"
+                    "  searches:        %6u\n"
+                    "  hits:            %6u %5.2f%% of searches\n"
+                    "  misses:          %6u %5.2f%%\n"
+                    "  hashes:          %6u %5.2f%%\n"
+                    "  hashHits:        %6u %5.2f%% (%5.2f%% of hashes)\n"
+                    "  hashMisses:      %6u %5.2f%% (%5.2f%%)\n"
+                    "  steps:           %6u %5.2f%% (%5.2f%%)\n"
+                    "  stepHits:        %6u %5.2f%% (%5.2f%%)\n"
+                    "  stepMisses:      %6u %5.2f%% (%5.2f%%)\n"
+                    "  initSearches:    %6u\n"
+                    "  changeSearches:  %6u\n"
+                    "  tableAllocFails: %6u\n"
+                    "  toDictFails:     %6u\n"
+                    "  wrapWatchFails:  %6u\n"
+                    "  adds:            %6u\n"
+                    "  addFails:        %6u\n"
+                    "  puts:            %6u\n"
+                    "  redundantPuts:   %6u\n"
+                    "  putFails:        %6u\n"
+                    "  changes:         %6u\n"
+                    "  changeFails:     %6u\n"
+                    "  compresses:      %6u\n"
+                    "  grows:           %6u\n"
+                    "  removes:         %6u\n"
+                    "  removeFrees:     %6u\n"
+                    "  uselessRemoves:  %6u\n"
+                    "  shrinks:         %6u\n",
+                    js_scope_stats.searches,
+                    js_scope_stats.hits, RATE(hits, searches),
+                    js_scope_stats.misses, RATE(misses, searches),
+                    js_scope_stats.hashes, RATE(hashes, searches),
+                    js_scope_stats.hashHits, RATE(hashHits, searches), RATE(hashHits, hashes),
+                    js_scope_stats.hashMisses, RATE(hashMisses, searches), RATE(hashMisses, hashes),
+                    js_scope_stats.steps, RATE(steps, searches), RATE(steps, hashes),
+                    js_scope_stats.stepHits, RATE(stepHits, searches), RATE(stepHits, hashes),
+                    js_scope_stats.stepMisses, RATE(stepMisses, searches), RATE(stepMisses, hashes),
+                    js_scope_stats.initSearches,
+                    js_scope_stats.changeSearches,
+                    js_scope_stats.tableAllocFails,
+                    js_scope_stats.toDictFails,
+                    js_scope_stats.wrapWatchFails,
+                    js_scope_stats.adds,
+                    js_scope_stats.addFails,
+                    js_scope_stats.puts,
+                    js_scope_stats.redundantPuts,
+                    js_scope_stats.putFails,
+                    js_scope_stats.changes,
+                    js_scope_stats.changeFails,
+                    js_scope_stats.compresses,
+                    js_scope_stats.grows,
+                    js_scope_stats.removes,
+                    js_scope_stats.removeFrees,
+                    js_scope_stats.uselessRemoves,
+                    js_scope_stats.shrinks);
+        }
 
 #undef RATE
 
         fflush(logfp);
     }
-
-    if (const char *filename = cx->runtime->propTreeDumpFilename) {
-        char pathname[1024];
-        JS_snprintf(pathname, sizeof pathname, "%s.%lu",
-                    filename, (unsigned long)cx->runtime->gcNumber);
-        FILE *dumpfp = fopen(pathname, "w");
-        if (dumpfp) {
-            typedef JSRuntime::EmptyShapeSet HS;
-
-            HS &h = cx->runtime->emptyShapes;
-            for (HS::Range r = h.all(); !r.empty(); r.popFront()) {
-                Shape *empty = r.front();
-                empty->dumpSubtree(cx, 0, dumpfp);
-                putc('\n', dumpfp);
-            }
-
-            fclose(dumpfp);
-        }
-    }
 #endif /* DEBUG */
 }
 
-void
-js::PropertyTree::unmarkShapes(JSContext *cx)
+bool
+js::PropertyTree::checkShapesAllUnmarked(JSContext *cx)
 {
-    JSArena **ap = &JS_PROPERTY_TREE(cx).arenaPool.first.next;
+    JSArena **ap = &arenaPool.first.next;
     while (JSArena *a = *ap) {
         Shape *limit = (Shape *) a->avail;
 
         for (Shape *shape = (Shape *) a->base; shape < limit; shape++) {
             /* If the id is null, shape is already on the freelist. */
             if (JSID_IS_VOID(shape->id))
                 continue;
 
             if (shape->marked())
-                shape->clearMark();
+                return false;
         }
         ap = &a->next;
     }
+
+    return true;
 }
+
+void
+js::PropertyTree::dumpShapes(JSContext *cx)
+{
+#ifdef DEBUG
+    JSRuntime *rt = cx->runtime;
+
+    if (const char *filename = rt->propTreeDumpFilename) {
+        char pathname[1024];
+        JS_snprintf(pathname, sizeof pathname, "%s.%lu",
+                    filename, (unsigned long)rt->gcNumber);
+        FILE *dumpfp = fopen(pathname, "w");
+        if (dumpfp) {
+            typedef JSCompartment::EmptyShapeSet HS;
+
+            for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
+                if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != *c)
+                    continue;
+
+                fprintf(dumpfp, "*** Compartment %p ***\n", (void *)*c);
+
+                HS &h = (*c)->emptyShapes;
+                for (HS::Range r = h.all(); !r.empty(); r.popFront()) {
+                    Shape *empty = r.front();
+                    empty->dumpSubtree(cx, 0, dumpfp);
+                    putc('\n', dumpfp);
+                }
+            }
+
+            fclose(dumpfp);
+        }
+    }
+#endif
+}
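The new dumpShapes above walks rt->compartments and, when gcCurrentCompartment is set,
skips every compartment that is not part of the current collection. A minimal standalone
sketch of that filtering loop follows; Runtime and Compartment here are invented stand-ins
for illustration, not the SpiderMonkey structures.

    #include <cstdio>
    #include <vector>

    struct Compartment { const char *name; };

    struct Runtime {
        std::vector<Compartment *> compartments;
        Compartment *gcCurrentCompartment;  // null means a full, all-compartment GC
    };

    static void dumpCompartments(Runtime &rt, FILE *fp) {
        for (Compartment *c : rt.compartments) {
            // Skip compartments that are not being collected right now.
            if (rt.gcCurrentCompartment && rt.gcCurrentCompartment != c)
                continue;
            fprintf(fp, "*** Compartment %s ***\n", c->name);
        }
    }

    int main() {
        Compartment atoms{"atoms"}, content{"content"};
        Runtime rt{{&atoms, &content}, nullptr};   // full GC: both are dumped
        dumpCompartments(rt, stdout);

        rt.gcCurrentCompartment = &content;        // single-compartment GC
        dumpCompartments(rt, stdout);              // only "content" is dumped
    }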
--- a/js/src/jspropertytree.h
+++ b/js/src/jspropertytree.h
@@ -41,77 +41,51 @@
 #define jspropertytree_h___
 
 #include "jsarena.h"
 #include "jshashtable.h"
 #include "jsprvtd.h"
 
 namespace js {
 
-enum {
-    MAX_KIDS_PER_CHUNK   = 10U,
-    CHUNK_HASH_THRESHOLD = 30U
-};
-
-struct KidsChunk {
-    js::Shape   *kids[MAX_KIDS_PER_CHUNK];
-    KidsChunk   *next;
-
-    static KidsChunk *create(JSContext *cx);
-    static KidsChunk *destroy(JSContext *cx, KidsChunk *chunk);
-};
-
 struct ShapeHasher {
     typedef js::Shape *Key;
     typedef const js::Shape *Lookup;
 
     static inline HashNumber hash(const Lookup l);
     static inline bool match(Key k, Lookup l);
 };
 
 typedef HashSet<js::Shape *, ShapeHasher, SystemAllocPolicy> KidsHash;
 
 class KidsPointer {
   private:
     enum {
         SHAPE = 0,
-        CHUNK = 1,
-        HASH  = 2,
-        TAG   = 3
+        HASH  = 1,
+        TAG   = 1
     };
 
     jsuword w;
 
   public:
     bool isNull() const { return !w; }
     void setNull() { w = 0; }
 
-    bool isShapeOrNull() const { return (w & TAG) == SHAPE; }
     bool isShape() const { return (w & TAG) == SHAPE && !isNull(); }
     js::Shape *toShape() const {
         JS_ASSERT(isShape());
         return reinterpret_cast<js::Shape *>(w & ~jsuword(TAG));
     }
     void setShape(js::Shape *shape) {
         JS_ASSERT(shape);
         JS_ASSERT((reinterpret_cast<jsuword>(shape) & TAG) == 0);
         w = reinterpret_cast<jsuword>(shape) | SHAPE;
     }
 
-    bool isChunk() const { return (w & TAG) == CHUNK; }
-    KidsChunk *toChunk() const {
-        JS_ASSERT(isChunk());
-        return reinterpret_cast<KidsChunk *>(w & ~jsuword(TAG));
-    }
-    void setChunk(KidsChunk *chunk) {
-        JS_ASSERT(chunk);
-        JS_ASSERT((reinterpret_cast<jsuword>(chunk) & TAG) == 0);
-        w = reinterpret_cast<jsuword>(chunk) | CHUNK;
-    }
-
     bool isHash() const { return (w & TAG) == HASH; }
     KidsHash *toHash() const {
         JS_ASSERT(isHash());
         return reinterpret_cast<KidsHash *>(w & ~jsuword(TAG));
     }
     void setHash(KidsHash *hash) {
         JS_ASSERT(hash);
         JS_ASSERT((reinterpret_cast<jsuword>(hash) & TAG) == 0);
@@ -122,34 +96,46 @@ class KidsPointer {
     void checkConsistency(const js::Shape *aKid) const;
 #endif
 };
 
 class PropertyTree
 {
     friend struct ::JSFunction;
 
-    JSArenaPool arenaPool;
-    js::Shape   *freeList;
+    JSCompartment *compartment;
+    JSArenaPool   arenaPool;
+    js::Shape     *freeList;
 
     bool insertChild(JSContext *cx, js::Shape *parent, js::Shape *child);
-    void removeChild(JSContext *cx, js::Shape *child);
+    void removeChild(js::Shape *child);
 
+    PropertyTree();
+
   public:
     enum { MAX_HEIGHT = 64 };
 
+    PropertyTree(JSCompartment *comp)
+        : compartment(comp), freeList(NULL)
+    {
+        PodZero(&arenaPool);
+    }
+
     bool init();
     void finish();
 
-    js::Shape *newShape(JSContext *cx, bool gcLocked = false);
+    js::Shape *newShapeUnchecked();
+    js::Shape *newShape(JSContext *cx);
     js::Shape *getChild(JSContext *cx, js::Shape *parent, const js::Shape &child);
 
-    static void orphanKids(JSContext *cx, js::Shape *shape);
-    static void sweepShapes(JSContext *cx);
-    static void unmarkShapes(JSContext *cx);
+    void orphanChildren(js::Shape *shape);
+    void sweepShapes(JSContext *cx);
+    bool checkShapesAllUnmarked(JSContext *cx);
+
+    static void dumpShapes(JSContext *cx);
 #ifdef DEBUG
     static void meter(JSBasicStats *bs, js::Shape *node);
 #endif
 };
 
 } /* namespace js */
 
 #endif /* jspropertytree_h___ */
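With the KidsChunk middle tier removed, KidsPointer above needs only a single tag bit: a low
bit of 0 means the word holds one child Shape, 1 means it holds a KidsHash pointer. Below is
a standalone sketch of the same one-bit tagged-pointer scheme; Node and NodeSet are toy
stand-ins for js::Shape and KidsHash, and the scheme assumes both pointees are at least
2-byte aligned.

    #include <cassert>
    #include <cstdint>
    #include <unordered_set>

    struct Node { std::uintptr_t payload = 0; };     // stand-in for js::Shape
    using NodeSet = std::unordered_set<Node *>;      // stand-in for KidsHash

    class TaggedKids {
        std::uintptr_t w = 0;
        static constexpr std::uintptr_t NODE = 0, HASH = 1, TAG = 1;

      public:
        bool isNull() const { return w == 0; }
        void setNull() { w = 0; }

        bool isNode() const { return (w & TAG) == NODE && !isNull(); }
        Node *toNode() const {
            assert(isNode());
            return reinterpret_cast<Node *>(w & ~TAG);
        }
        void setNode(Node *n) {
            // Pointers must be at least 2-byte aligned so the low bit is free.
            assert(n && (reinterpret_cast<std::uintptr_t>(n) & TAG) == 0);
            w = reinterpret_cast<std::uintptr_t>(n) | NODE;
        }

        bool isHash() const { return (w & TAG) == HASH; }
        NodeSet *toHash() const {
            assert(isHash());
            return reinterpret_cast<NodeSet *>(w & ~TAG);
        }
        void setHash(NodeSet *h) {
            assert(h && (reinterpret_cast<std::uintptr_t>(h) & TAG) == 0);
            w = reinterpret_cast<std::uintptr_t>(h) | HASH;
        }
    };

    int main() {
        Node n;
        TaggedKids kids;
        kids.setNode(&n);
        assert(kids.isNode() && kids.toNode() == &n);

        NodeSet set{&n};
        kids.setHash(&set);
        assert(kids.isHash() && kids.toHash()->count(&n) == 1);
    }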
--- a/js/src/jsproxy.cpp
+++ b/js/src/jsproxy.cpp
@@ -492,31 +492,31 @@ bool
 JSScriptedProxyHandler::getPropertyDescriptor(JSContext *cx, JSObject *proxy, jsid id, bool set,
                                               PropertyDescriptor *desc)
 {
     JSObject *handler = GetProxyHandlerObject(cx, proxy);
     AutoValueRooter tvr(cx);
     return GetFundamentalTrap(cx, handler, ATOM(getPropertyDescriptor), tvr.addr()) &&
            Trap1(cx, handler, tvr.value(), id, tvr.addr()) &&
            ((tvr.value().isUndefined() && IndicatePropertyNotFound(cx, desc)) ||
-            ReturnedValueMustNotBePrimitive(cx, proxy, ATOM(getPropertyDescriptor), tvr.value()) &&
-            ParsePropertyDescriptorObject(cx, proxy, id, tvr.value(), desc));
+            (ReturnedValueMustNotBePrimitive(cx, proxy, ATOM(getPropertyDescriptor), tvr.value()) &&
+             ParsePropertyDescriptorObject(cx, proxy, id, tvr.value(), desc)));
 }
 
 bool
 JSScriptedProxyHandler::getOwnPropertyDescriptor(JSContext *cx, JSObject *proxy, jsid id, bool set,
                                                  PropertyDescriptor *desc)
 {
     JSObject *handler = GetProxyHandlerObject(cx, proxy);
     AutoValueRooter tvr(cx);
     return GetFundamentalTrap(cx, handler, ATOM(getOwnPropertyDescriptor), tvr.addr()) &&
            Trap1(cx, handler, tvr.value(), id, tvr.addr()) &&
            ((tvr.value().isUndefined() && IndicatePropertyNotFound(cx, desc)) ||
-            ReturnedValueMustNotBePrimitive(cx, proxy, ATOM(getPropertyDescriptor), tvr.value()) &&
-            ParsePropertyDescriptorObject(cx, proxy, id, tvr.value(), desc));
+            (ReturnedValueMustNotBePrimitive(cx, proxy, ATOM(getPropertyDescriptor), tvr.value()) &&
+             ParsePropertyDescriptorObject(cx, proxy, id, tvr.value(), desc)));
 }
 
 bool
 JSScriptedProxyHandler::defineProperty(JSContext *cx, JSObject *proxy, jsid id,
                                        PropertyDescriptor *desc)
 {
     JSObject *handler = GetProxyHandlerObject(cx, proxy);
     AutoValueRooter tvr(cx);
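The two jsproxy.cpp hunks only add parentheses: && already binds more tightly than ||, so
the old and new expressions parse identically. The explicit grouping just makes the
short-circuit structure obvious and avoids compiler warnings about && nested inside ||.
A quick exhaustive check of the equivalence (plain standalone C++, nothing
SpiderMonkey-specific):

    #include <cassert>

    int main() {
        for (int bits = 0; bits < 8; ++bits) {
            bool a = bits & 1, b = bits & 2, c = bits & 4;
            // && binds tighter than ||, so the parenthesized form is identical.
            assert((a || b && c) == (a || (b && c)));
        }
    }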
--- a/js/src/jsscope.cpp
+++ b/js/src/jsscope.cpp
@@ -66,42 +66,46 @@
 #include "jsdbgapiinlines.h"
 #include "jsobjinlines.h"
 #include "jsscopeinlines.h"
 
 using namespace js;
 using namespace js::gc;
 
 uint32
-js_GenerateShape(JSContext *cx, bool gcLocked)
+js_GenerateShape(JSRuntime *rt)
 {
-    JSRuntime *rt;
     uint32 shape;
 
-    rt = cx->runtime;
     shape = JS_ATOMIC_INCREMENT(&rt->shapeGen);
     JS_ASSERT(shape != 0);
     if (shape >= SHAPE_OVERFLOW_BIT) {
         /*
          * FIXME bug 440834: The shape id space has overflowed. Currently we
         * cope badly with this and schedule the GC on every call. But
          * first we make sure that increments from other threads would not
          * have a chance to wrap around shapeGen to zero.
          */
         rt->shapeGen = SHAPE_OVERFLOW_BIT;
         shape = SHAPE_OVERFLOW_BIT;
 
 #ifdef JS_THREADSAFE
-        Conditionally<AutoLockGC> lockIf(!gcLocked, rt);
+        AutoLockGC lockIf(rt);
 #endif
         TriggerGC(rt);
     }
     return shape;
 }
 
+uint32
+js_GenerateShape(JSContext *cx)
+{
+    return js_GenerateShape(cx->runtime);
+}
+
 bool
 JSObject::ensureClassReservedSlotsForEmptyObject(JSContext *cx)
 {
     JS_ASSERT(nativeEmpty());
 
     /*
      * Subtle rule: objects that call JSObject::ensureInstanceReservedSlots
      * must either:
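js_GenerateShape above keeps one runtime-wide counter: it is bumped atomically, and once it
reaches SHAPE_OVERFLOW_BIT it is pinned there and a GC is requested so live shape numbers
can be regenerated and the id space reclaimed. A standalone sketch of that saturating
counter, using std::atomic and an invented Runtime with a gcRequested flag rather than the
real JSRuntime and TriggerGC:

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    constexpr std::uint32_t OVERFLOW_BIT = std::uint32_t(1) << 31;

    struct Runtime {
        std::atomic<std::uint32_t> shapeGen{0};
        bool gcRequested = false;
    };

    std::uint32_t generateShape(Runtime &rt) {
        std::uint32_t shape = ++rt.shapeGen;   // atomic increment, like JS_ATOMIC_INCREMENT
        assert(shape != 0);
        if (shape >= OVERFLOW_BIT) {
            // Saturate so other threads cannot wrap the counter back to zero,
            // then ask for a GC, which regenerates the live shape numbers.
            rt.shapeGen = OVERFLOW_BIT;
            shape = OVERFLOW_BIT;
            rt.gcRequested = true;
        }
        return shape;
    }

    int main() {
        Runtime rt;
        assert(generateShape(rt) == 1);
        rt.shapeGen = OVERFLOW_BIT - 1;
        assert(generateShape(rt) == OVERFLOW_BIT && rt.gcRequested);
    }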
@@ -193,87 +197,78 @@ Shape::hashify(JSRuntime *rt)
 #ifdef DEBUG
 # include "jsprf.h"
 # define LIVE_SCOPE_METER(cx,expr) JS_LOCK_RUNTIME_VOID(cx->runtime,expr)
 #else
 # define LIVE_SCOPE_METER(cx,expr) /* nothing */
 #endif
 
 static inline bool
-InitField(JSContext *cx, EmptyShape *JSRuntime:: *field, Class *clasp, uint32 shape)
+InitField(JSCompartment *comp, EmptyShape *JSCompartment:: *field, Class *clasp)
 {
-    if (EmptyShape *emptyShape = EmptyShape::create(cx, clasp)) {
-        cx->runtime->*field = emptyShape;
-        JS_ASSERT(emptyShape->shape == shape);
+    if (EmptyShape *emptyShape = EmptyShape::create(comp, clasp)) {
+        comp->*field = emptyShape;
         return true;
     }
     return false;
 }
 
 /* static */
 bool
-Shape::initRuntimeState(JSContext *cx)
+Shape::initEmptyShapes(JSCompartment *comp)
 {
     /*
      * NewArguments allocates dslots to have enough room for the argc of the
      * particular arguments object being created. Since the shared empty
      * arguments shape is never mutated, it's safe to pretend to have all
      * the slots possible.
      *
      * Note how the fast paths in jsinterp.cpp for JSOP_LENGTH and JSOP_GETELEM
      * bypass resolution of scope properties for length and element indices on
      * arguments objects. This helps ensure that any arguments object needing
      * its own mutable scope (with unique shape) is a rare event.
      */
-    if (!InitField(cx, &JSRuntime::emptyArgumentsShape, &js_ArgumentsClass,
-                   Shape::EMPTY_ARGUMENTS_SHAPE)) {
+    if (!InitField(comp, &JSCompartment::emptyArgumentsShape, &js_ArgumentsClass))
         return false;
-    }
 
-    if (!InitField(cx, &JSRuntime::emptyBlockShape, &js_BlockClass, Shape::EMPTY_BLOCK_SHAPE))
+    if (!InitField(comp, &JSCompartment::emptyBlockShape, &js_BlockClass))
         return false;
 
     /*
      * Initialize the shared scope for all empty Call objects so gets for args
      * and vars do not force the creation of a mutable scope for the particular
      * call object being accessed.
      */
-    if (!InitField(cx, &JSRuntime::emptyCallShape, &js_CallClass, Shape::EMPTY_CALL_SHAPE))
+    if (!InitField(comp, &JSCompartment::emptyCallShape, &js_CallClass))
         return false;
 
     /* A DeclEnv object holds the name binding for a named function expression. */
-    if (!InitField(cx, &JSRuntime::emptyDeclEnvShape, &js_DeclEnvClass,
-                   Shape::EMPTY_DECL_ENV_SHAPE)) {
+    if (!InitField(comp, &JSCompartment::emptyDeclEnvShape, &js_DeclEnvClass))
         return false;
-    }
 
     /* Non-escaping native enumerator objects share this empty scope. */
-    if (!InitField(cx, &JSRuntime::emptyEnumeratorShape, &js_IteratorClass,
-                   Shape::EMPTY_ENUMERATOR_SHAPE)) {
+    if (!InitField(comp, &JSCompartment::emptyEnumeratorShape, &js_IteratorClass))
         return false;
-    }
 
     /* Same drill for With objects. */
-    if (!InitField(cx, &JSRuntime::emptyWithShape, &js_WithClass, Shape::EMPTY_WITH_SHAPE))
+    if (!InitField(comp, &JSCompartment::emptyWithShape, &js_WithClass))
         return false;
 
     return true;
 }
 
 /* static */
 void
-Shape::finishRuntimeState(JSContext *cx)
+Shape::finishEmptyShapes(JSCompartment *comp)
 {
-    JSRuntime *rt = cx->runtime;
-
-    rt->emptyArgumentsShape = NULL;
-    rt->emptyBlockShape = NULL;
-    rt->emptyCallShape = NULL;
-    rt->emptyDeclEnvShape = NULL;
-    rt->emptyEnumeratorShape = NULL;
-    rt->emptyWithShape = NULL;
+    comp->emptyArgumentsShape = NULL;
+    comp->emptyBlockShape = NULL;
+    comp->emptyCallShape = NULL;
+    comp->emptyDeclEnvShape = NULL;
+    comp->emptyEnumeratorShape = NULL;
+    comp->emptyWithShape = NULL;
 }
 
 JS_STATIC_ASSERT(sizeof(JSHashNumber) == 4);
 JS_STATIC_ASSERT(sizeof(jsid) == JS_BYTES_PER_WORD);
 
 #if JS_BYTES_PER_WORD == 4
 # define HASH_ID(id) ((JSHashNumber)(JSID_BITS(id)))
 #elif JS_BYTES_PER_WORD == 8
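InitField above takes a pointer to a data member (EmptyShape *JSCompartment::*field) so a
single helper can fill any of the six per-compartment empty-shape fields. A standalone
sketch of that member-pointer idiom with invented toy types; unlike the real code, which
allocates the shapes from the compartment's property tree, the sketch just uses operator
new:

    #include <cassert>
    #include <new>

    struct Class { const char *name; };
    struct EmptyShape { const Class *clasp; };

    struct Compartment {
        EmptyShape *emptyArgumentsShape = nullptr;
        EmptyShape *emptyBlockShape = nullptr;
    };

    // `field` names which Compartment member to assign, e.g.
    // &Compartment::emptyBlockShape.
    static bool initField(Compartment *comp, EmptyShape *Compartment::*field,
                          const Class *clasp)
    {
        if (EmptyShape *shape = new (std::nothrow) EmptyShape{clasp}) {
            comp->*field = shape;    // assign through the member pointer
            return true;
        }
        return false;
    }

    int main() {
        static const Class argumentsClass{"Arguments"}, blockClass{"Block"};
        Compartment comp;
        bool ok = initField(&comp, &Compartment::emptyArgumentsShape, &argumentsClass) &&
                  initField(&comp, &Compartment::emptyBlockShape, &blockClass);
        assert(ok && comp.emptyBlockShape->clasp == &blockClass);
        delete comp.emptyArgumentsShape;
        delete comp.emptyBlockShape;
    }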
@@ -586,39 +581,39 @@ Shape *
 Shape::newDictionaryShape(JSContext *cx, const Shape &child, Shape **listp)
 {
     Shape *dprop = JS_PROPERTY_TREE(cx).newShape(cx);
     if (!dprop)
         return NULL;
 
     new (dprop) Shape(child.id, child.rawGetter, child.rawSetter, child.slot, child.attrs,
                       (child.flags & ~FROZEN) | IN_DICTIONARY, child.shortid,
-                      js_GenerateShape(cx, false), child.slotSpan);
+                      js_GenerateShape(cx), child.slotSpan);
 
     dprop->listp = NULL;
     dprop->insertIntoDictionary(listp);
 
-    JS_RUNTIME_METER(cx->runtime, liveDictModeNodes);
+    JS_COMPARTMENT_METER(cx->compartment->liveDictModeNodes++);
     return dprop;
 }
 
 Shape *
 Shape::newDictionaryShapeForAddProperty(JSContext *cx, jsid id,
                                         PropertyOp getter, PropertyOp setter,
                                         uint32 slot, uintN attrs, uintN flags, intN shortid)
 {
     Shape *shape = JS_PROPERTY_TREE(cx).newShape(cx);
     if (!shape)
         return NULL;
 
     new (shape) Shape(id, getter, setter, slot, attrs, (flags & ~FROZEN) | IN_DICTIONARY, shortid);
     shape->parent = NULL;
     shape->listp = NULL;
 
-    JS_RUNTIME_METER(cx->runtime, liveDictModeNodes);
+    JS_COMPARTMENT_METER(cx->compartment->liveDictModeNodes++);
     return shape;
 }
 
 Shape *
 Shape::newDictionaryList(JSContext *cx, Shape **listp)
 {
     Shape *shape = *listp;
     Shape *list = shape;
@@ -753,22 +748,16 @@ JSObject::checkShapeConsistency()
                 }
             }
             if (prev) {
                 JS_ASSERT(prev->slotSpan >= shape->slotSpan);
                 shape->kids.checkConsistency(prev);
             }
             prev = shape;
         }
-
-        if (throttle == 0) {
-            JS_ASSERT(!shape->table);
-            JS_ASSERT(JSID_IS_EMPTY(shape->id));
-            JS_ASSERT(shape->slot == SHAPE_INVALID_SLOT);
-        }
     }
 }
 #else
 # define CHECK_SHAPE_CONSISTENCY(obj) ((void)0)
 #endif
 
 const Shape *
 JSObject::addProperty(JSContext *cx, jsid id,
@@ -848,17 +837,16 @@ JSObject::addPropertyInternal(JSContext 
 
             /* Pass the table along to the new lastProp, namely shape. */
             JS_ASSERT(shape->parent->table == table);
             shape->parent->setTable(NULL);
             shape->setTable(table);
         }
 #ifdef DEBUG
         LIVE_SCOPE_METER(cx, ++cx->runtime->liveObjectProps);
-        JS_RUNTIME_METER(cx->runtime, totalObjectProps);
 #endif
         CHECK_SHAPE_CONSISTENCY(this);
         METER(adds);
         return shape;
     }
 
     CHECK_SHAPE_CONSISTENCY(this);
     METER(addFails);
@@ -1012,17 +1000,17 @@ JSObject::putProperty(JSContext *cx, jsi
         updateFlags(shape);
 
         /*
          * We have just mutated shape in place, but nothing caches it based on
          * shape->shape unless shape is lastProp and !hasOwnShape(). Therefore
          * we regenerate only lastProp->shape. We will clearOwnShape(), which
          * sets objShape to lastProp->shape.
          */
-        lastProp->shape = js_GenerateShape(cx, false);
+        lastProp->shape = js_GenerateShape(cx);
         clearOwnShape();
     } else {
         /*
          * Updating lastProp in a non-dictionary-mode object. Such objects
          * share their shapes via a tree rooted at a prototype emptyShape, or
          * perhaps a well-known compartment-wide singleton emptyShape.
          *
          * If any shape in the tree has a property hashtable, it is shared and
@@ -1127,17 +1115,17 @@ JSObject::changeProperty(JSContext *cx, 
 
         mutableShape->rawGetter = getter;
         mutableShape->rawSetter = setter;
         mutableShape->attrs = uint8(attrs);
 
         updateFlags(shape);
 
         /* See the corresponding code in putProperty. */
-        lastProp->shape = js_GenerateShape(cx, false);
+        lastProp->shape = js_GenerateShape(cx);
         clearOwnShape();
 
         if (!js_UpdateWatchpointsForShape(cx, this, shape)) {
             METER(wrapWatchFails);
             return NULL;
         }
 
         newShape = mutableShape;
@@ -1365,17 +1353,17 @@ JSObject::generateOwnShape(JSContext *cx
      * If we are recording, here is where we forget already-guarded shapes.
      * Any subsequent property operation upon object on the trace currently
      * being recorded will re-guard (and re-memoize).
      */
     if (TraceRecorder *tr = TRACE_RECORDER(cx))
         tr->forgetGuardedShapesForObject(this);
 #endif
 
-    setOwnShape(js_GenerateShape(cx, false));
+    setOwnShape(js_GenerateShape(cx));
 }
 
 void
 JSObject::deletingShapeChange(JSContext *cx, const Shape &shape)
 {
     JS_ASSERT(!JSID_IS_VOID(shape.id));
     generateOwnShape(cx);
 }
@@ -1504,16 +1492,21 @@ PrintPropertyMethod(JSTracer *trc, char 
     if (n < bufsize)
         JS_snprintf(buf + n, bufsize - n, " method");
 }
 #endif
 
 void
 Shape::trace(JSTracer *trc) const
 {
+#ifdef DEBUG
+    JSRuntime *rt = trc->context->runtime;
+    JS_ASSERT_IF(rt->gcCurrentCompartment, compartment == rt->gcCurrentCompartment);
+#endif
+
     if (IS_GC_MARKING_TRACER(trc))
         mark();
 
     MarkId(trc, id, "id");
 
     if (attrs & (JSPROP_GETTER | JSPROP_SETTER)) {
         if ((attrs & JSPROP_GETTER) && rawGetter) {
             JS_SET_TRACING_DETAILS(trc, PrintPropertyGetterOrSetter, this, 0);
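The DEBUG-only compartment back-pointer added to Shape lets Shape::trace assert the key
invariant behind this patch: during a single-compartment GC, marking must never reach a
shape owned by another compartment, because every shape now lives in exactly one
compartment's property tree. A standalone sketch of that assertion, with toy types in place
of the real Shape and JSRuntime:

    #include <cassert>

    struct Compartment {};

    struct Shape {
        Compartment *compartment;   // debug-only back-pointer in the real patch
        Shape *parent;
        bool marked;
    };

    struct Runtime {
        Compartment *gcCurrentCompartment = nullptr;  // null means a full GC
    };

    static void traceShape(Runtime &rt, Shape *shape) {
        // The property tree is per-compartment, so a shape's whole parent chain
        // must belong to the compartment being collected.
        for (Shape *s = shape; s; s = s->parent) {
            assert(!rt.gcCurrentCompartment || s->compartment == rt.gcCurrentCompartment);
            s->marked = true;
        }
    }

    int main() {
        Compartment comp;
        Shape root{&comp, nullptr, false};
        Shape leaf{&comp, &root, false};
        Runtime rt;
        rt.gcCurrentCompartment = &comp;   // single-compartment collection
        traceShape(rt, &leaf);
        assert(root.marked && leaf.marked);
    }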
--- a/js/src/jsscope.h
+++ b/js/src/jsscope.h
@@ -301,31 +301,25 @@ struct Shape : public JSObjectMap
 
   protected:
     mutable uint32 numSearches;     /* Only updated until it reaches HASH_MIN_SEARCHES. */
     mutable js::PropertyTable *table;
 
   public:
     inline void freeTable(JSContext *cx);
 
-    static bool initRuntimeState(JSContext *cx);
-    static void finishRuntimeState(JSContext *cx);
-
-    enum {
-        EMPTY_ARGUMENTS_SHAPE   = 1,
-        EMPTY_BLOCK_SHAPE       = 2,
-        EMPTY_CALL_SHAPE        = 3,
-        EMPTY_DECL_ENV_SHAPE    = 4,
-        EMPTY_ENUMERATOR_SHAPE  = 5,
-        EMPTY_WITH_SHAPE        = 6,
-        LAST_RESERVED_SHAPE     = 6
-    };
+    static bool initEmptyShapes(JSCompartment *comp);
+    static void finishEmptyShapes(JSCompartment *comp);
 
     jsid                id;
 
+#ifdef DEBUG
+    JSCompartment       *compartment;
+#endif
+
   protected:
     union {
         js::PropertyOp  rawGetter;      /* getter and setter hooks or objects */
         JSObject        *getterObj;     /* user-defined callable "get" object or
                                            null if shape->hasGetterValue(); or
                                            joined function object if METHOD flag
                                            is set. */
         js::Class       *clasp;         /* prototype class for empty scope */
@@ -505,17 +499,17 @@ struct Shape : public JSObjectMap
         /* Prevent unwanted mutation of shared Bindings::lastBinding nodes. */
         FROZEN          = 0x10
     };
 
     Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter, uint32 slot, uintN attrs,
           uintN flags, intN shortid, uint32 shape = INVALID_SHAPE, uint32 slotSpan = 0);
 
     /* Used by EmptyShape (see jsscopeinlines.h). */
-    Shape(JSContext *cx, Class *aclasp);
+    Shape(JSCompartment *comp, Class *aclasp);
 
     bool marked() const         { return (flags & MARK) != 0; }
     void mark() const           { flags |= MARK; }
     void clearMark()            { flags &= ~MARK; }
 
     bool hasRegenFlag() const   { return (flags & SHAPE_REGEN) != 0; }
     void setRegenFlag()         { flags |= SHAPE_REGEN; }
     void clearRegenFlag()       { flags &= ~SHAPE_REGEN; }
@@ -633,25 +627,32 @@ struct Shape : public JSObjectMap
 #ifdef DEBUG
     void dump(JSContext *cx, FILE *fp) const;
     void dumpSubtree(JSContext *cx, int level, FILE *fp) const;
 #endif
 };
 
 struct EmptyShape : public js::Shape
 {
-    EmptyShape(JSContext *cx, js::Class *aclasp);
+    EmptyShape(JSCompartment *comp, js::Class *aclasp);
 
     js::Class *getClass() const { return clasp; };
 
+    static EmptyShape *create(JSCompartment *comp, js::Class *clasp) {
+        js::Shape *eprop = comp->propertyTree.newShapeUnchecked();
+        if (!eprop)
+            return NULL;
+        return new (eprop) EmptyShape(comp, clasp);
+    }
+
     static EmptyShape *create(JSContext *cx, js::Class *clasp) {
         js::Shape *eprop = JS_PROPERTY_TREE(cx).newShape(cx);
         if (!eprop)
             return NULL;
-        return new (eprop) EmptyShape(cx, clasp);
+        return new (eprop) EmptyShape(cx->compartment, clasp);
     }
 };
 
 } /* namespace js */
 
 /* js::Shape pointer tag bit indicating a collision. */
 #define SHAPE_COLLISION                 (jsuword(1))
 #define SHAPE_REMOVED                   ((js::Shape *) SHAPE_COLLISION)
@@ -732,16 +733,17 @@ JSObject::hasPropertyTable() const
  * FIXME: shape must not be null, should use a reference here and other places.
  */
 inline void
 JSObject::setLastProperty(const js::Shape *shape)
 {
     JS_ASSERT(!inDictionaryMode());
     JS_ASSERT(!JSID_IS_VOID(shape->id));
     JS_ASSERT_IF(lastProp, !JSID_IS_VOID(lastProp->id));
+    JS_ASSERT(shape->compartment == compartment());
 
     lastProp = const_cast<js::Shape *>(shape);
 }
 
 inline void
 JSObject::removeLastProperty()
 {
     JS_ASSERT(!inDictionaryMode());
@@ -800,22 +802,21 @@ Shape::insertIntoDictionary(js::Shape **
 /*
  * If SHORTID is set in shape->flags, we use shape->shortid rather
  * than id when calling shape's getter or setter.
  */
 #define SHAPE_USERID(shape)                                                   \
     ((shape)->hasShortID() ? INT_TO_JSID((shape)->shortid)                    \
                            : (shape)->id)
 
-#ifndef JS_THREADSAFE
-# define js_GenerateShape(cx, gcLocked)    js_GenerateShape (cx)
-#endif
+extern uint32
+js_GenerateShape(JSRuntime *rt);
 
 extern uint32
-js_GenerateShape(JSContext *cx, bool gcLocked);
+js_GenerateShape(JSContext *cx);
 
 #ifdef DEBUG
 struct JSScopeStats {
     jsrefcount          searches;
     jsrefcount          hits;
     jsrefcount          misses;
     jsrefcount          hashes;
     jsrefcount          hashHits;
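Both EmptyShape::create overloads above follow the same allocate-then-construct pattern:
grab a raw Shape-sized node from the compartment's property tree (newShapeUnchecked or
newShape) and construct the EmptyShape in place with placement new. A standalone sketch of
that pattern over a toy bump allocator (invented types; the real storage is the GC-swept
arena pool):

    #include <cassert>
    #include <cstddef>
    #include <new>

    struct Class { const char *name; };

    struct EmptyShape {
        const Class *clasp;
        explicit EmptyShape(const Class *c) : clasp(c) {}
    };

    // Trivial bump allocator standing in for the property tree's arena pool.
    class ShapePool {
        alignas(EmptyShape) unsigned char storage[16 * sizeof(EmptyShape)];
        std::size_t used = 0;

      public:
        void *allocate() {
            if (used + sizeof(EmptyShape) > sizeof(storage))
                return nullptr;                 // like newShapeUnchecked() failing
            void *p = storage + used;
            used += sizeof(EmptyShape);
            return p;
        }
    };

    static EmptyShape *createEmptyShape(ShapePool &pool, const Class *clasp) {
        void *raw = pool.allocate();
        if (!raw)
            return nullptr;
        return new (raw) EmptyShape(clasp);     // construct in place
    }

    int main() {
        static const Class callClass{"Call"};
        ShapePool pool;
        EmptyShape *shape = createEmptyShape(pool, &callClass);
        assert(shape && shape->clasp == &callClass);
        // Nothing to delete: the pool owns the storage, much as the GC-swept
        // arenas do in the real code.
    }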
--- a/js/src/jsscopeinlines.h
+++ b/js/src/jsscopeinlines.h
@@ -102,17 +102,17 @@ JSObject::canProvideEmptyShape(js::Class
 }
 
 inline void
 JSObject::updateShape(JSContext *cx)
 {
     JS_ASSERT(isNative());
     js::LeaveTraceIfGlobalObject(cx, this);
     if (hasOwnShape())
-        setOwnShape(js_GenerateShape(cx, false));
+        setOwnShape(js_GenerateShape(cx));
     else
         objShape = lastProp->shape;
 }
 
 inline void
 JSObject::updateFlags(const js::Shape *shape, bool isDefinitelyAtom)
 {
     jsuint index;
@@ -141,23 +141,23 @@ JSObject::trace(JSTracer *trc)
     js::Shape *shape = lastProp;
 
     if (IS_GC_MARKING_TRACER(trc) && cx->runtime->gcRegenShapes) {
         /*
          * Either this object has its own shape, which must be regenerated, or
          * it must have the same shape as lastProp.
          */
         if (!shape->hasRegenFlag()) {
-            shape->shape = js_RegenerateShapeForGC(cx);
+            shape->shape = js_RegenerateShapeForGC(cx->runtime);
             shape->setRegenFlag();
         }
 
         uint32 newShape = shape->shape;
         if (hasOwnShape()) {
-            newShape = js_RegenerateShapeForGC(cx);
+            newShape = js_RegenerateShapeForGC(cx->runtime);
             JS_ASSERT(newShape != shape->shape);
         }
         objShape = newShape;
     }
 
     /* Trace our property tree or dictionary ancestor line. */
     do {
         shape->trace(trc);
@@ -175,20 +175,28 @@ Shape::Shape(jsid id, js::PropertyOp get
 {
     JS_ASSERT_IF(slotSpan != SHAPE_INVALID_SLOT, slotSpan < JSObject::NSLOTS_LIMIT);
     JS_ASSERT_IF(getter && (attrs & JSPROP_GETTER), getterObj->isCallable());
     JS_ASSERT_IF(setter && (attrs & JSPROP_SETTER), setterObj->isCallable());
     kids.setNull();
 }
 
 inline
-Shape::Shape(JSContext *cx, Class *aclasp)
-  : JSObjectMap(js_GenerateShape(cx, false), JSSLOT_FREE(aclasp)), numSearches(0), table(NULL),
-    id(JSID_EMPTY), clasp(aclasp), rawSetter(NULL), slot(SHAPE_INVALID_SLOT), attrs(0),
-    flags(SHARED_EMPTY), shortid(0), parent(NULL)
+Shape::Shape(JSCompartment *comp, Class *aclasp)
+  : JSObjectMap(js_GenerateShape(comp->rt), JSSLOT_FREE(aclasp)),
+    numSearches(0),
+    table(NULL),
+    id(JSID_EMPTY),
+    clasp(aclasp),
+    rawSetter(NULL),
+    slot(SHAPE_INVALID_SLOT),
+    attrs(0),
+    flags(SHARED_EMPTY),
+    shortid(0),
+    parent(NULL)
 {
     kids.setNull();
 }
 
 inline JSDHashNumber
 Shape::hash() const
 {
     JSDHashNumber hash = 0;
@@ -271,20 +279,20 @@ Shape::set(JSContext* cx, JSObject* obj,
 
     /* See the comment in js::Shape::get as to why we check for With. */
     if (obj->getClass() == &js_WithClass)
         obj = js_UnwrapWithObject(cx, obj);
     return js::CallJSPropertyOpSetter(cx, setterOp(), obj, SHAPE_USERID(this), vp);
 }
 
 inline
-EmptyShape::EmptyShape(JSContext *cx, js::Class *aclasp)
-  : js::Shape(cx, aclasp)
+EmptyShape::EmptyShape(JSCompartment *comp, js::Class *aclasp)
+  : js::Shape(comp, aclasp)
 {
 #ifdef DEBUG
-    if (cx->runtime->meterEmptyShapes())
-        cx->runtime->emptyShapes.put(this);
+    if (comp->rt->meterEmptyShapes())
+        comp->emptyShapes.put(this);
 #endif
 }
 
 } /* namespace js */
 
 #endif /* jsscopeinlines_h___ */
--- a/js/src/jsscriptinlines.h
+++ b/js/src/jsscriptinlines.h
@@ -47,39 +47,39 @@
 #include "jsregexp.h"
 #include "jsscript.h"
 #include "jsscope.h"
 
 namespace js {
 
 inline
 Bindings::Bindings(JSContext *cx)
-  : lastBinding(cx->runtime->emptyCallShape), nargs(0), nvars(0), nupvars(0)
+  : lastBinding(cx->compartment->emptyCallShape), nargs(0), nvars(0), nupvars(0)
 {
 }
 
 inline void
 Bindings::transfer(JSContext *cx, Bindings *bindings)
 {
-    JS_ASSERT(lastBinding == cx->runtime->emptyCallShape);
+    JS_ASSERT(lastBinding == cx->compartment->emptyCallShape);
 
     *this = *bindings;
 #ifdef DEBUG
     bindings->lastBinding = NULL;
 #endif
 
     /* Preserve back-pointer invariants across the lastBinding transfer. */
     if (lastBinding->inDictionary())
         lastBinding->listp = &this->lastBinding;
 }
 
 inline void
 Bindings::clone(JSContext *cx, Bindings *bindings)
 {
-    JS_ASSERT(lastBinding == cx->runtime->emptyCallShape);
+    JS_ASSERT(lastBinding == cx->compartment->emptyCallShape);
 
     /*
      * Non-dictionary bindings are fine to share, as are dictionary bindings if
      * they're copy-on-modification.
      */
     JS_ASSERT(!bindings->lastBinding->inDictionary() || bindings->lastBinding->frozen());
 
     *this = *bindings;
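Bindings now start from cx->compartment->emptyCallShape, the compartment's shared canonical
empty Call shape, and transfer/clone assert that the destination is still in that pristine
state. A standalone sketch of the same invariant with invented toy types (not the real
js::Bindings):

    #include <cassert>

    struct Shape {};

    struct Compartment {
        Shape emptyCallShape;   // canonical, shared by every fresh Bindings
    };

    struct Bindings {
        Shape *lastBinding;
        int nargs = 0, nvars = 0;

        explicit Bindings(Compartment *comp) : lastBinding(&comp->emptyCallShape) {}

        void transfer(Compartment *comp, Bindings *other) {
            // Only a still-empty Bindings may receive another one's bindings.
            assert(lastBinding == &comp->emptyCallShape);
            *this = *other;
            other->lastBinding = nullptr;   // debug-style poisoning, as above
        }
    };

    int main() {
        Compartment comp;
        Bindings source(&comp), dest(&comp);
        source.nargs = 2;
        dest.transfer(&comp, &source);
        assert(dest.nargs == 2 && source.lastBinding == nullptr);
    }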
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -6325,19 +6325,16 @@ public:
     bool isOk() {
         return mOk;
     }
 };
 
 JS_REQUIRES_STACK TreeFragment*
 TraceRecorder::findNestedCompatiblePeer(TreeFragment* f)
 {
-    TraceMonitor* tm;
-
-    tm = &JS_TRACE_MONITOR(cx);
     unsigned int ngslots = tree->globalSlots->length();
 
     for (; f != NULL; f = f->peer) {
         if (!f->code())
             continue;
 
         debug_only_printf(LC_TMTracer, "checking nested types %p: ", (void*)f);
 
@@ -6532,19 +6529,18 @@ TracerState::~TracerState()
         /* If we didn't already deep-bail... */
         JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL ||
                   JS_THREAD_DATA(cx)->recordingCompartment == cx->compartment);
         JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
         JS_ASSERT(JS_THREAD_DATA(cx)->onTraceCompartment == cx->compartment);
         JS_THREAD_DATA(cx)->onTraceCompartment = NULL;
     }
     
-    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
-    tm->tracerState = prev;
-    tm->tracecx = NULL;
+    traceMonitor->tracerState = prev;
+    traceMonitor->tracecx = NULL;
 }
 
 /* Call |f|, return the exit taken. */
 static JS_ALWAYS_INLINE VMSideExit*
 ExecuteTrace(JSContext* cx, TraceMonitor* tm, Fragment* f, TracerState& state)
 {
     JS_ASSERT(!tm->bailExit);
 #ifdef JS_METHODJIT
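The TracerState destructor above now restores state through the traceMonitor member
captured when the object was built, instead of re-deriving the monitor from cx with
JS_TRACE_MONITOR. A standalone sketch of that cache-it-at-construction RAII pattern, with
toy types in place of TraceMonitor and TracerState:

    #include <cassert>

    struct TraceMonitor {
        void *tracerState = nullptr;
    };

    class TracerStateGuard {
        TraceMonitor *traceMonitor;   // cached at construction
        void *prev;

      public:
        TracerStateGuard(TraceMonitor *tm, void *self)
          : traceMonitor(tm), prev(tm->tracerState)
        {
            traceMonitor->tracerState = self;
        }

        ~TracerStateGuard() {
            // Uses the cached pointer; no need to look the monitor up again.
            traceMonitor->tracerState = prev;
        }
    };

    int main() {
        TraceMonitor tm;
        {
            int state;
            TracerStateGuard guard(&tm, &state);
            assert(tm.tracerState == &state);
        }
        assert(tm.tracerState == nullptr);  // restored on destruction
    }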