bug 579100 - various GC cleanups. r=anygregor
author Igor Bukanov <igor@mir2.org>
Mon, 26 Jul 2010 20:44:04 +0200
changeset 48583 eb6c9591249e166b1a09ff13229db604d56413dd
parent 48582 0bf3bcc923dc8bbb2a8e9678dc73e409bddc42d4
child 48584 da869ecdb83f683a5b477f2472e1e2167e95bca2
push id 14748
push user rsayre@mozilla.com
push date Sun, 01 Aug 2010 00:33:23 +0000
treeherder mozilla-central@f0df797bb2a9
reviewers anygregor
bugs 579100
milestone 2.0b2pre
js/src/jsarray.cpp
js/src/jscntxt.h
js/src/jsgc.cpp
js/src/jsprvtd.h
js/src/xpconnect/src/xpcjsruntime.cpp
js/src/xpconnect/src/xpcprivate.h
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -1007,21 +1007,19 @@ array_trace(JSTracer *trc, JSObject *obj
     for (uint32 i = 0; i < capacity; i++) {
         Value v = obj->getDenseArrayElement(i);
         if (v.isMagic(JS_ARRAY_HOLE))
             ++holes;
         else
             MarkValue(trc, obj->getDenseArrayElement(i), "dense_array_elems");
     }
 
-    if (trc == trc->context->runtime->gcMarkingTracer &&
-        holes > MIN_SPARSE_INDEX &&
-        holes > capacity / 4 * 3) {
+    if (IS_GC_MARKING_TRACER(trc) && holes > MIN_SPARSE_INDEX && holes > capacity / 4 * 3) {
         /* This might fail, in which case we don't slowify it. */
-        reinterpret_cast<JSGCTracer *>(trc)->arraysToSlowify.append(obj);
+        static_cast<GCMarker *>(trc)->arraysToSlowify.append(obj);
     }
 }
 
 extern JSObjectOps js_ArrayObjectOps;
 
 static const JSObjectMap SharedArrayMap(&js_ArrayObjectOps, JSObjectMap::SHAPELESS);
 
 JSObjectOps js_ArrayObjectOps = {
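
As an aside, the heuristic above queues a dense array for slowification only when it has many holes both in absolute terms and relative to its capacity. A minimal standalone sketch of that check, where shouldSlowify and the MIN_SPARSE_INDEX value are illustrative stand-ins rather than the real jsarray.cpp definitions:

#include <cstdint>
#include <cstdio>

// Illustrative stand-in for the constant defined in jsarray.cpp.
static const uint32_t MIN_SPARSE_INDEX = 256;

// True when a dense array with this capacity and hole count is sparse
// enough that the marking tracer would queue it for slowification.
static bool shouldSlowify(uint32_t capacity, uint32_t holes)
{
    return holes > MIN_SPARSE_INDEX && holes > capacity / 4 * 3;
}

int main()
{
    std::printf("%d\n", shouldSlowify(1000, 900));  // 1: more than 3/4 holes
    std::printf("%d\n", shouldSlowify(1000, 500));  // 0: stays dense
    return 0;
}
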
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -1166,16 +1166,60 @@ struct WrapperHasher
     }
 };
 
 typedef HashMap<Value, Value, WrapperHasher, SystemAllocPolicy> WrapperMap;
 
 class AutoValueVector;
 class AutoIdVector;
 
+struct GCMarker : public JSTracer {
+  private:
+    /* The color is only applied to objects, functions and xml. */
+    uint32 color;
+
+    /* See comments before delayMarkingChildren in jsgc.cpp. */
+    JSGCArena           *unmarkedArenaStackTop;
+#ifdef DEBUG
+    size_t              markLaterCount;
+#endif
+
+  public:
+    js::Vector<JSObject *, 0, js::SystemAllocPolicy> arraysToSlowify;
+
+  public:
+    explicit GCMarker(JSContext *cx)
+      : color(0), unmarkedArenaStackTop(NULL)
+    {
+        JS_TRACER_INIT(this, cx, NULL);
+#ifdef DEBUG
+        markLaterCount = 0;
+#endif
+    }
+
+    uint32 getMarkColor() const {
+        return color;
+    }
+
+    void setMarkColor(uint32 newColor) {
+        /*
+         * We must process any delayed marking here, otherwise we confuse
+         * colors.
+         */
+        markDelayedChildren();
+        color = newColor;
+    }
+
+    void delayMarkingChildren(void *thing);
+
+    JS_FRIEND_API(void) markDelayedChildren();
+
+    void slowifyArrays();
+};
+
 } /* namespace js */
 
 struct JSCompartment {
     JSRuntime *rt;
     JSPrincipals *principals;
     void *data;
     bool marked;
     js::WrapperMap crossCompartmentWrappers;
@@ -1192,21 +1236,16 @@ struct JSCompartment {
     bool wrap(JSContext *cx, js::PropertyOp *op);
     bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
     bool wrap(JSContext *cx, js::AutoIdVector &props);
     bool wrapException(JSContext *cx);
 
     void sweep(JSContext *cx);
 };
 
-struct JSGCTracer : public JSTracer {
-    uint32 color;
-    js::Vector<JSObject *, 0, js::SystemAllocPolicy> arraysToSlowify;
-};
-
 extern JS_FRIEND_API(void)
 js_TriggerAllOperationCallbacks(JSRuntime *rt, JSBool gcLocked);
 
 struct JSRuntime {
     /* Default compartment. */
     JSCompartment       *defaultCompartment;
 
     /* List of compartments (protected by the GC lock). */
@@ -1246,17 +1285,17 @@ struct JSRuntime {
     js::GCLocks         gcLocksHash;
     jsrefcount          gcKeepAtoms;
     size_t              gcBytes;
     size_t              gcLastBytes;
     size_t              gcMaxBytes;
     size_t              gcMaxMallocBytes;
     uint32              gcEmptyArenaPoolLifespan;
     uint32              gcNumber;
-    JSGCTracer          *gcMarkingTracer;
+    js::GCMarker        *gcMarkingTracer;
     uint32              gcTriggerFactor;
     size_t              gcTriggerBytes;
     volatile JSBool     gcIsNeeded;
     volatile JSBool     gcFlushCodeCaches;
 
     /*
      * NB: do not pack another flag here by claiming gcPadding unless the new
      * flag is written only by the GC thread.  Atomic updates to packed bytes
@@ -1285,22 +1324,16 @@ struct JSRuntime {
     JSGCCallback        gcCallback;
 
     /*
      * Malloc counter to measure memory pressure for GC scheduling. It runs
      * from gcMaxMallocBytes down to zero.
      */
     volatile ptrdiff_t  gcMallocBytes;
 
-    /* See comments before DelayMarkingChildren is jsgc.cpp. */
-    JSGCArena           *gcUnmarkedArenaStackTop;
-#ifdef DEBUG
-    size_t              gcMarkLaterCount;
-#endif
-
 #ifdef JS_THREADSAFE
     JSBackgroundThread  gcHelperThread;
 #endif
 
     js::GCChunkAllocator    *gcChunkAllocator;
     
     void setCustomGCChunkAllocator(js::GCChunkAllocator *allocator) {
         JS_ASSERT(allocator);
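
The GCMarker class added above folds the old JSGCTracer fields together with the per-runtime delayed-marking state (gcUnmarkedArenaStackTop, gcMarkLaterCount) and makes setMarkColor drain the delayed-marking stack before switching colors. A toy model of that discipline, not SpiderMonkey code, with invented names:

#include <cassert>
#include <cstdio>
#include <vector>

// Toy stand-in for GCMarker: only the color and the delayed-marking queue
// are modeled. Switching colors drains the queue first so that deferred
// children are never traced under the wrong color.
struct ToyMarker {
    unsigned color = 0;            // 0 = black, 1 = gray
    std::vector<int> delayed;      // things whose children are still pending

    void markDelayedChildren() {
        for (int thing : delayed)
            std::printf("tracing children of %d under color %u\n", thing, color);
        delayed.clear();
    }

    void setMarkColor(unsigned newColor) {
        // Same ordering as GCMarker::setMarkColor in the hunk above.
        markDelayedChildren();
        color = newColor;
    }
};

int main() {
    ToyMarker m;
    m.delayed.push_back(42);
    m.setMarkColor(1);             // thing 42 is traced while still black
    assert(m.delayed.empty());
    return 0;
}
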
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -235,22 +235,19 @@ JS_STATIC_ASSERT(sizeof(jsbitmap) == siz
 
 JS_STATIC_ASSERT(sizeof(JSString) % GC_CELL_SIZE == 0);
 JS_STATIC_ASSERT(sizeof(JSObject) % GC_CELL_SIZE == 0);
 JS_STATIC_ASSERT(sizeof(JSFunction) % GC_CELL_SIZE == 0);
 #ifdef JSXML
 JS_STATIC_ASSERT(sizeof(JSXML) % GC_CELL_SIZE == 0);
 #endif
 
-JS_STATIC_ASSERT(GC_CELL_SIZE == sizeof(jsdouble));
-const size_t DOUBLES_PER_ARENA = GC_CELLS_PER_ARENA;
-
 struct JSGCArenaInfo {
     /*
-     * Allocation list for the arena or NULL if the arena holds double values.
+     * Allocation list for the arena.
      */
     JSGCArenaList   *list;
 
     /*
      * Pointer to the previous arena in a linked list. The arena can either
      * belong to one of JSContext.gcArenaList lists or, when it does not have
      * any allocated GC things, to the list of free arenas in the chunk with
      * head stored in JSGCChunkInfo.lastFreeArena.
@@ -566,19 +563,18 @@ GCArenaIndexToThing(JSGCArena *a, JSGCAr
     JS_ASSERT(index <= ThingsPerArena(ainfo->list->thingSize));
     jsuword offsetInArena = index * ainfo->list->thingSize;
     return reinterpret_cast<uint8 *>(a->toPageStart() + offsetInArena);
 }
 
 /*
  * The private JSGCThing struct, which describes a JSRuntime.gcFreeList element.
  */
-union JSGCThing {
+struct JSGCThing {
     JSGCThing   *link;
-    double      asDouble;
 };
 
 static inline JSGCThing *
 MakeNewArenaFreeList(JSGCArena *a, size_t thingSize)
 {
     jsuword thingsStart = a->toPageStart();
     jsuword lastThingMinAddr = thingsStart + GC_ARENA_SIZE - thingSize * 2 + 1;
     jsuword thingPtr = thingsStart;
@@ -878,35 +874,16 @@ JS_FRIEND_API(bool)
 js_IsAboutToBeFinalized(void *thing)
 {
     if (JSString::isStatic(thing))
         return false;
 
     return !IsMarkedGCThing(thing);
 }
 
-static void
-MarkDelayedChildren(JSTracer *trc);
-
-/* The color is only applied to objects, functions and xml. */
-JS_FRIEND_API(uint32)
-js_SetMarkColor(JSTracer *trc, uint32 color)
-{
-    JSGCTracer *gctracer = trc->context->runtime->gcMarkingTracer;
-    if (trc != gctracer)
-        return color;
-
-    /* Must process any delayed tracing here, otherwise we confuse colors. */
-    MarkDelayedChildren(trc);
-
-    uint32 oldColor = gctracer->color;
-    gctracer->color = color;
-    return oldColor;
-}
-
 JS_FRIEND_API(bool)
 js_GCThingIsMarked(void *thing, uint32 color)
 {
     return IsMarkedGCThing(thing, color);
 }
 
 JSBool
 js_InitGC(JSRuntime *rt, uint32 maxbytes)
@@ -1898,35 +1875,37 @@ JS_TraceChildren(JSTracer *trc, void *th
 #if JS_HAS_XML_SUPPORT
       case JSTRACE_XML:
         js_TraceXML(trc, (JSXML *)thing);
         break;
 #endif
     }
 }
 
+namespace js {
+
 /*
  * When the native stack is low, the GC does not call JS_TraceChildren to mark
  * the reachable "children" of the thing. Rather the thing is put aside and
  * JS_TraceChildren is called later with more space on the C stack.
  *
  * To implement such delayed marking of the children with minimal overhead for
  * the normal case of sufficient native stack, the code uses two fields per
  * arena stored in JSGCMarkingDelay. The first field, JSGCMarkingDelay::link,
  * links all arenas with delayed things into a stack list with the pointer to
- * stack top in JSRuntime::gcUnmarkedArenaStackTop. DelayMarkingChildren adds
- * arenas to the stack as necessary while MarkDelayedChildren pops the arenas
+ * stack top in JSRuntime::gcUnmarkedArenaStackTop. delayMarkingChildren adds
+ * arenas to the stack as necessary while markDelayedChildren pops the arenas
  * from the stack until it empties.
  *
  * The second field, JSGCMarkingDelay::unmarkedChildren, is a bitmap that
  * tells for which things the GC should call JS_TraceChildren later. The
  * bitmap is a single word. As such it does not pinpoint the delayed things
  * in the arena but rather tells the intervals containing
  * ThingsPerUnmarkedBit(thingSize) things. Later the code in
- * MarkDelayedChildren discovers such intervals and calls JS_TraceChildren on
+ * markDelayedChildren discovers such intervals and calls JS_TraceChildren on
  * any marked thing in the interval. This implies that JS_TraceChildren can be
  * called many times for a single thing if the thing shares the same interval
  * with some delayed things. This should be fine as any GC graph
  * marking/traversing hooks must allow repeated calls during the same GC cycle.
  * In particular, xpcom cycle collector relies on this.
  *
  * Note that such repeated scanning may slow down the GC. In particular, it is
  * possible to construct an object graph where the GC calls JS_TraceChildren
@@ -1938,242 +1917,238 @@ JS_TraceChildren(JSTracer *trc, void *th
  * unmarkedChildren covers 4 objects.
  */
 inline unsigned
 ThingsPerUnmarkedBit(unsigned thingSize)
 {
     return JS_HOWMANY(ThingsPerArena(thingSize), JS_BITS_PER_WORD);
 }
 
-static void
-DelayMarkingChildren(JSRuntime *rt, void *thing)
+void
+GCMarker::delayMarkingChildren(void *thing)
 {
+    JS_ASSERT(this == context->runtime->gcMarkingTracer);
     JS_ASSERT(IsMarkedGCThing(thing));
-    METER(rt->gcStats.unmarked++);
+    METER(context->runtime->gcStats.unmarked++);
 
     JSGCArena *a = JSGCArena::fromGCThing(thing);
     JSGCArenaInfo *ainfo = a->getInfo();
     JSGCMarkingDelay *markingDelay = a->getMarkingDelay();
 
     size_t thingArenaIndex = GCThingToArenaIndex(thing);
     size_t unmarkedBitIndex = thingArenaIndex /
                               ThingsPerUnmarkedBit(ainfo->list->thingSize);
     JS_ASSERT(unmarkedBitIndex < JS_BITS_PER_WORD);
 
     jsuword bit = jsuword(1) << unmarkedBitIndex;
     if (markingDelay->unmarkedChildren != 0) {
-        JS_ASSERT(rt->gcUnmarkedArenaStackTop);
+        JS_ASSERT(unmarkedArenaStackTop);
         if (markingDelay->unmarkedChildren & bit) {
             /* bit already covers things with children to mark later. */
             return;
         }
         markingDelay->unmarkedChildren |= bit;
     } else {
         /*
          * The thing is the first thing with not yet marked children in the
          * whole arena, so push the arena on the stack of arenas with things
          * to be marked later unless the arena has already been pushed. We
          * detect that through checking prevUnmarked as the field is 0
          * only for not yet pushed arenas. To ensure that
          *   prevUnmarked != 0
          * even when the stack contains one element, we make prevUnmarked
          * for the arena at the bottom to point to itself.
          *
-         * See comments in MarkDelayedChildren.
+         * See comments in markDelayedChildren.
          */
         markingDelay->unmarkedChildren = bit;
         if (!markingDelay->link) {
-            if (!rt->gcUnmarkedArenaStackTop) {
+            if (!unmarkedArenaStackTop) {
                 /* Stack was empty, mark the arena as the bottom element. */
                 markingDelay->link = a;
             } else {
-                JS_ASSERT(rt->gcUnmarkedArenaStackTop->getMarkingDelay()->link);
-                markingDelay->link = rt->gcUnmarkedArenaStackTop;
+                JS_ASSERT(unmarkedArenaStackTop->getMarkingDelay()->link);
+                markingDelay->link = unmarkedArenaStackTop;
             }
-            rt->gcUnmarkedArenaStackTop = a;
+            unmarkedArenaStackTop = a;
         }
-        JS_ASSERT(rt->gcUnmarkedArenaStackTop);
+        JS_ASSERT(unmarkedArenaStackTop);
     }
 #ifdef DEBUG
-    rt->gcMarkLaterCount += ThingsPerUnmarkedBit(ainfo->list->thingSize);
-    METER_UPDATE_MAX(rt->gcStats.maxunmarked, rt->gcMarkLaterCount);
+    markLaterCount += ThingsPerUnmarkedBit(ainfo->list->thingSize);
+    METER_UPDATE_MAX(context->runtime->gcStats.maxunmarked, markLaterCount);
 #endif
 }
 
-static void
-MarkDelayedChildren(JSTracer *trc)
+JS_FRIEND_API(void)
+GCMarker::markDelayedChildren()
 {
-    JSRuntime *rt;
-    JSGCArena *a, *aprev;
-    unsigned thingSize, traceKind;
-    unsigned thingsPerUnmarkedBit;
-    unsigned unmarkedBitIndex, thingIndex, indexLimit, endIndex;
-
-    rt = trc->context->runtime;
-    a = rt->gcUnmarkedArenaStackTop;
+    JS_ASSERT(this == context->runtime->gcMarkingTracer);
+
+    JSGCArena *a = unmarkedArenaStackTop;
     if (!a) {
-        JS_ASSERT(rt->gcMarkLaterCount == 0);
+        JS_ASSERT(markLaterCount == 0);
         return;
     }
 
     for (;;) {
         /*
          * The following assert verifies that the current arena belongs to the
-         * unmarked stack, since DelayMarkingChildren ensures that even for
+         * unmarked stack, since delayMarkingChildren ensures that even for
          * the stack's bottom, prevUnmarked != 0 but rather points to
          * itself.
          */
         JSGCArenaInfo *ainfo = a->getInfo();
         JSGCMarkingDelay *markingDelay = a->getMarkingDelay();
         JS_ASSERT(markingDelay->link);
-        JS_ASSERT(rt->gcUnmarkedArenaStackTop->getMarkingDelay()->link);
-        thingSize = ainfo->list->thingSize;
-        traceKind = GetFinalizableArenaTraceKind(ainfo);
-        indexLimit = ThingsPerArena(thingSize);
-        thingsPerUnmarkedBit = ThingsPerUnmarkedBit(thingSize);
+        JS_ASSERT(unmarkedArenaStackTop->getMarkingDelay()->link);
+        unsigned thingSize = ainfo->list->thingSize;
+        unsigned traceKind = GetFinalizableArenaTraceKind(ainfo);
+        unsigned indexLimit = ThingsPerArena(thingSize);
+        unsigned thingsPerUnmarkedBit = ThingsPerUnmarkedBit(thingSize);
 
         /*
          * We cannot use do-while loop here as a->unmarkedChildren can be zero
          * before the loop as a leftover from the previous iterations. See
          * comments after the loop.
          */
         while (markingDelay->unmarkedChildren != 0) {
-            unmarkedBitIndex = JS_FLOOR_LOG2W(markingDelay->unmarkedChildren);
+            unsigned unmarkedBitIndex = JS_FLOOR_LOG2W(markingDelay->unmarkedChildren);
             markingDelay->unmarkedChildren &= ~(jsuword(1) << unmarkedBitIndex);
 #ifdef DEBUG
-            JS_ASSERT(rt->gcMarkLaterCount >= thingsPerUnmarkedBit);
-            rt->gcMarkLaterCount -= thingsPerUnmarkedBit;
+            JS_ASSERT(markLaterCount >= thingsPerUnmarkedBit);
+            markLaterCount -= thingsPerUnmarkedBit;
 #endif
-            thingIndex = unmarkedBitIndex * thingsPerUnmarkedBit;
-            endIndex = thingIndex + thingsPerUnmarkedBit;
+            unsigned thingIndex = unmarkedBitIndex * thingsPerUnmarkedBit;
+            unsigned endIndex = thingIndex + thingsPerUnmarkedBit;
 
             /*
              * endIndex can go beyond the last allocated thing as the real
              * limit can be "inside" the bit.
              */
             if (endIndex > indexLimit)
                 endIndex = indexLimit;
             uint8 *thing = GCArenaIndexToThing(a, ainfo, thingIndex);
             uint8 *end = GCArenaIndexToThing(a, ainfo, endIndex);
             do {
                 JS_ASSERT(thing < end);
                 if (IsMarkedGCThing(thing))
-                    JS_TraceChildren(trc, thing, traceKind);
+                    JS_TraceChildren(this, thing, traceKind);
                 thing += thingSize;
             } while (thing != end);
         }
 
         /*
         * We finished tracing of all things in the arena but we can only
          * pop it from the stack if the arena is the stack's top.
          *
          * When JS_TraceChildren from the above calls JS_CallTracer that in
-         * turn on low C stack calls DelayMarkingChildren and the latter
+         * turn on low C stack calls delayMarkingChildren and the latter
          * pushes new arenas to the unmarked stack, we have to skip popping
          * of this arena until it becomes the top of the stack again.
          */
-        if (a == rt->gcUnmarkedArenaStackTop) {
-            aprev = markingDelay->link;
+        if (a == unmarkedArenaStackTop) {
+            JSGCArena *aprev = markingDelay->link;
             markingDelay->link = NULL;
             if (a == aprev) {
                 /*
                  * prevUnmarked points to itself and we reached the bottom of
                  * the stack.
                  */
                 break;
             }
-            rt->gcUnmarkedArenaStackTop = a = aprev;
+            unmarkedArenaStackTop = a = aprev;
         } else {
-            a = rt->gcUnmarkedArenaStackTop;
+            a = unmarkedArenaStackTop;
         }
     }
-    JS_ASSERT(rt->gcUnmarkedArenaStackTop);
-    JS_ASSERT(!rt->gcUnmarkedArenaStackTop->getMarkingDelay()->link);
-    rt->gcUnmarkedArenaStackTop = NULL;
-    JS_ASSERT(rt->gcMarkLaterCount == 0);
+    JS_ASSERT(unmarkedArenaStackTop);
+    JS_ASSERT(!unmarkedArenaStackTop->getMarkingDelay()->link);
+    unmarkedArenaStackTop = NULL;
+    JS_ASSERT(markLaterCount == 0);
 }
 
-namespace js {
+void
+GCMarker::slowifyArrays()
+{
+    while (!arraysToSlowify.empty()) {
+        JSObject *obj = arraysToSlowify.back();
+        arraysToSlowify.popBack();
+        if (IsMarkedGCThing(obj))
+            obj->makeDenseArraySlow(context);
+    }
+}
 
 void
 Mark(JSTracer *trc, void *thing, uint32 kind)
 {
-    JSContext *cx;
-    JSRuntime *rt;
-
     JS_ASSERT(thing);
     JS_ASSERT(JS_IS_VALID_TRACE_KIND(kind));
     JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
+    JS_ASSERT_IF(!JSString::isStatic(thing), kind == GetFinalizableThingTraceKind(thing));
+#ifdef DEBUG
+    if (IS_GC_MARKING_TRACER(trc)) {
+        JSRuntime *rt = trc->context->runtime;
+        JS_ASSERT(rt->gcMarkingTracer == trc);
+        JS_ASSERT(rt->gcRunning);
+    }
+#endif
 
     if (!IS_GC_MARKING_TRACER(trc)) {
         trc->callback(trc, thing, kind);
-        goto out;
+    } else {
+        GCMarker *gcmarker = static_cast<GCMarker *>(trc);
+
+        if (kind == JSTRACE_STRING) {
+            /*
+             * Optimize for strings as their marking is not recursive.
+             *
+             * Iterate through all nodes and leaves in the rope if this is
+             * part of a rope; otherwise, we only iterate once: on the string
+             * itself.
+             */
+            JSRopeNodeIterator iter((JSString *) thing);
+            JSString *str = iter.init();
+            do {
+                for (;;) {
+                    if (JSString::isStatic(str))
+                        break;
+                    JS_ASSERT(kind == GetFinalizableThingTraceKind(str));
+                    if (!MarkIfUnmarkedGCThing(str))
+                        break;
+                    if (!str->isDependent())
+                        break;
+                    str = str->dependentBase();
+                }
+                str = iter.next();
+            } while (str);
+           
+        } else if (MarkIfUnmarkedGCThing(thing, gcmarker->getMarkColor())) {
+            /*
+             * With JS_GC_ASSUME_LOW_C_STACK defined the mark phase of GC
+             * always uses the non-recursive code that otherwise would be
+             * called only on a low C stack condition.
+             */
+#ifdef JS_GC_ASSUME_LOW_C_STACK
+# define RECURSION_TOO_DEEP() true
+#else
+            int stackDummy;
+# define RECURSION_TOO_DEEP() (!JS_CHECK_STACK_SIZE(trc->context, stackDummy))
+#endif
+            if (RECURSION_TOO_DEEP())
+                gcmarker->delayMarkingChildren(thing);
+            else
+                JS_TraceChildren(trc, thing, kind);
+        }
     }
 
-    cx = trc->context;
-    rt = cx->runtime;
-    JS_ASSERT(rt->gcMarkingTracer == trc);
-    JS_ASSERT(rt->gcRunning);
-
-    /*
-     * Optimize for string and double as their size is known and their tracing
-     * is not recursive.
-     */
-    if (kind == JSTRACE_STRING) {
-        /*
-         * Iterate through all nodes and leaves in the rope if this is part of a
-         * rope; otherwise, we only iterate once: on the string itself.
-         */
-        JSRopeNodeIterator iter((JSString *) thing);
-        JSString *str = iter.init();
-        do {
-            for (;;) {
-                if (JSString::isStatic(str))
-                    break;
-                JS_ASSERT(kind == GetFinalizableThingTraceKind(str));
-                if (!MarkIfUnmarkedGCThing(str))
-                    break;
-                if (!str->isDependent())
-                    break;
-                str = str->dependentBase();
-            }
-            str = iter.next();
-        } while (str);
-        goto out;
-        /* NOTREACHED */
-    }
-
-    JS_ASSERT(kind == GetFinalizableThingTraceKind(thing));
-    if (!MarkIfUnmarkedGCThing(thing, reinterpret_cast<JSGCTracer *>(trc)->color))
-        goto out;
-
-    /*
-     * With JS_GC_ASSUME_LOW_C_STACK defined the mark phase of GC always
-     * uses the non-recursive code that otherwise would be called only on
-     * a low C stack condition.
-     */
-#ifdef JS_GC_ASSUME_LOW_C_STACK
-# define RECURSION_TOO_DEEP() JS_TRUE
-#else
-    int stackDummy;
-# define RECURSION_TOO_DEEP() (!JS_CHECK_STACK_SIZE(cx, stackDummy))
-#endif
-
-    if (RECURSION_TOO_DEEP()) {
-        DelayMarkingChildren(rt, thing);
-    } else {
-        JS_TraceChildren(trc, thing, kind);
-    }
-
-  out:
 #ifdef DEBUG
     trc->debugPrinter = NULL;
     trc->debugPrintArg = NULL;
 #endif
-    return;     /* to avoid out: right_curl when DEBUG is not defined */
 }
 
 void
 MarkGCThing(JSTracer *trc, void *thing)
 {
     JS_ASSERT(size_t(thing) % JS_GCTHING_ALIGN == 0);
     
     if (!thing)
@@ -2816,25 +2791,16 @@ struct GCTimer {
 #else
 # define GCTIMER_PARAM
 # define GCTIMER_ARG
 # define TIMESTAMP(x)       ((void) 0)
 # define GCTIMER_BEGIN()    ((void) 0)
 # define GCTIMER_END(last)  ((void) 0)
 #endif
 
-static inline bool
-HasMarkedDoubles(JSGCArena *a)
-{
-    JS_STATIC_ASSERT(GC_MARK_BITMAP_SIZE == 8 * sizeof(uint64));
-    uint64 *markBitmap = (uint64 *) a->getMarkBitmap();
-    return !!(markBitmap[0] | markBitmap[1] | markBitmap[2] | markBitmap[3] |
-              markBitmap[4] | markBitmap[5] | markBitmap[6] | markBitmap[7]);
-}
-
 #ifdef JS_THREADSAFE
 
 namespace js {
 
 JS_FRIEND_API(void)
 BackgroundSweepTask::replenishAndFreeLater(void *ptr)
 {
     JS_ASSERT(freeCursor == freeCursorEnd);
@@ -2954,39 +2920,35 @@ PreGCCleanup(JSContext *cx, JSGCInvocati
  * other thread must be either outside all requests or blocked waiting for GC
  * to finish. Note that the caller does not hold rt->gcLock.
  */
 static void
 GC(JSContext *cx  GCTIMER_PARAM)
 {
     JSRuntime *rt = cx->runtime;
     rt->gcNumber++;
-    JS_ASSERT(!rt->gcUnmarkedArenaStackTop);
-    JS_ASSERT(rt->gcMarkLaterCount == 0);
 
     /*
      * Mark phase.
      */
-    JSGCTracer trc;
-    JS_TRACER_INIT(&trc, cx, NULL);
-    trc.color = BLACK;
-    rt->gcMarkingTracer = &trc;
-    JS_ASSERT(IS_GC_MARKING_TRACER(&trc));
-
+    GCMarker gcmarker(cx);
+    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
+    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
+    rt->gcMarkingTracer = &gcmarker;
+             
     for (JSGCChunkInfo **i = rt->gcChunks.begin(); i != rt->gcChunks.end(); ++i)
         (*i)->clearMarkBitmap();
-    js_TraceRuntime(&trc);
+    js_TraceRuntime(&gcmarker);
     js_MarkScriptFilenames(rt);
 
     /*
      * Mark children of things that caused too deep recursion during the above
      * tracing.
      */
-    MarkDelayedChildren(&trc);
-    JS_ASSERT(rt->gcMarkLaterCount == 0);
+    gcmarker.markDelayedChildren();
 
     rt->gcMarkingTracer = NULL;
 
     if (rt->gcCallback)
         (void) rt->gcCallback(cx, JSGC_MARK_END);
 
 #ifdef JS_THREADSAFE
     JS_ASSERT(!cx->gcSweepTask);
@@ -2997,39 +2959,38 @@ GC(JSContext *cx  GCTIMER_PARAM)
     /*
      * Sweep phase.
      *
      * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
      * so that any attempt to allocate a GC-thing from a finalizer will fail,
      * rather than nest badly and leave the unmarked newborn to be swept.
      *
      * We first sweep atom state so we can use js_IsAboutToBeFinalized on
-     * JSString or jsdouble held in a hashtable to check if the hashtable
-     * entry can be freed. Note that even after the entry is freed, JSObject
-     * finalizers can continue to access the corresponding jsdouble* and
-     * JSString* assuming that they are unique. This works since the
-     * atomization API must not be called during GC.
+     * JSString held in a hashtable to check if the hashtable entry can be
+     * freed. Note that even after the entry is freed, JSObject finalizers can
+     * continue to access the corresponding JSString* assuming that they are
+     * unique. This works since the atomization API must not be called during
+     * the GC.
      */
     TIMESTAMP(startSweep);
     js_SweepAtomState(cx);
 
     /* Finalize watch points associated with unreachable objects. */
     js_SweepWatchPoints(cx);
 
 #ifdef DEBUG
     /* Save the pre-sweep count of scope-mapped properties. */
     rt->liveScopePropsPreSweep = rt->liveScopeProps;
 #endif
 
     /*
      * We finalize iterators before other objects so the iterator can use the
      * object which properties it enumerates over to finalize the enumeration
-     * state. We finalize objects before string, double and other GC things
-     * things to ensure that object's finalizer can access them even if they
-     * will be freed.
+     * state. We finalize objects before other GC things to ensure that
+     * object's finalizer can access them even if they will be freed.
      */
     JS_ASSERT(!rt->gcEmptyArenaList);
     FinalizeArenaList<JSObject, FinalizeObject>(cx, FINALIZE_OBJECT);
     FinalizeArenaList<JSFunction, FinalizeFunction>(cx, FINALIZE_FUNCTION);
 #if JS_HAS_XML_SUPPORT
     FinalizeArenaList<JSXML, FinalizeXML>(cx, FINALIZE_XML);
 #endif
     TIMESTAMP(sweepObjectEnd);
@@ -3060,25 +3021,18 @@ GC(JSContext *cx  GCTIMER_PARAM)
     /*
      * Sweep script filenames after sweeping functions in the generic loop
      * above. In this way when a scripted function's finalizer destroys the
      * script and calls rt->destroyScriptHook, the hook can still access the
      * script's filename. See bug 323267.
      */
     js_SweepScriptFilenames(rt);
 
-    /*
-     * Slowify arrays we have accumulated.
-     */
-    while (!trc.arraysToSlowify.empty()) {
-        JSObject *obj = trc.arraysToSlowify.back();
-        trc.arraysToSlowify.popBack();
-        if (IsMarkedGCThing(obj))
-            obj->makeDenseArraySlow(cx);
-    }
+    /* Slowify arrays we have accumulated. */
+    gcmarker.slowifyArrays();
 
     /*
      * Destroy arenas after we finished the sweeping so finalizers can safely
      * use js_IsAboutToBeFinalized().
      */
     FreeGCChunks(rt);
     TIMESTAMP(sweepDestroyEnd);
 
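
For scale, the delayed-marking comment in the jsgc.cpp hunk above gives each arena a single word-sized unmarkedChildren bitmap, so one bit covers roughly ThingsPerArena(thingSize) / JS_BITS_PER_WORD things. A back-of-the-envelope sketch with assumed numbers (a 4 KB arena, 64-bit words, ~64 bytes of per-arena overhead; the real layout differs):

#include <cstdio>

// Assumed, illustrative values -- not the real arena layout in jsgc.cpp.
static const unsigned ARENA_PAYLOAD = 4096 - 64;  // usable bytes per arena
static const unsigned BITS_PER_WORD = 64;

static unsigned thingsPerArena(unsigned thingSize) {
    return ARENA_PAYLOAD / thingSize;
}

// Mirrors JS_HOWMANY(ThingsPerArena(thingSize), JS_BITS_PER_WORD).
static unsigned thingsPerUnmarkedBit(unsigned thingSize) {
    return (thingsPerArena(thingSize) + BITS_PER_WORD - 1) / BITS_PER_WORD;
}

int main() {
    // Small things share a bit, so JS_TraceChildren can be re-run for several
    // of them; ~16-byte things come out at 4 per bit under these assumptions,
    // which is the granularity the comment warns about.
    std::printf("64-byte things: %u per bit\n", thingsPerUnmarkedBit(64));
    std::printf("16-byte things: %u per bit\n", thingsPerUnmarkedBit(16));
    return 0;
}
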
--- a/js/src/jsprvtd.h
+++ b/js/src/jsprvtd.h
@@ -84,17 +84,17 @@ struct Compiler;
 }
 }
 
 #endif
 
 /* Struct typedefs. */
 typedef struct JSArgumentFormatMap  JSArgumentFormatMap;
 typedef struct JSCodeGenerator      JSCodeGenerator;
-typedef union JSGCThing             JSGCThing;
+typedef struct JSGCThing            JSGCThing;
 typedef struct JSGenerator          JSGenerator;
 typedef struct JSNativeEnumerator   JSNativeEnumerator;
 typedef struct JSFunctionBox        JSFunctionBox;
 typedef struct JSObjectBox          JSObjectBox;
 typedef struct JSParseNode          JSParseNode;
 typedef struct JSProperty           JSProperty;
 typedef struct JSSharpObjectMap     JSSharpObjectMap;
 typedef struct JSEmptyScope         JSEmptyScope;
--- a/js/src/xpconnect/src/xpcjsruntime.cpp
+++ b/js/src/xpconnect/src/xpcjsruntime.cpp
@@ -332,19 +332,25 @@ void XPCJSRuntime::TraceJS(JSTracer* trc
     }
 
     // XPCJSObjectHolders don't participate in cycle collection, so always trace
     // them here.
     for(XPCRootSetElem *e = self->mObjectHolderRoots; e ; e = e->GetNextRoot())
         static_cast<XPCJSObjectHolder*>(e)->TraceJS(trc);
 
     // Mark these roots as gray so the CC can walk them later.
-    uint32 oldColor = js_SetMarkColor(trc, XPC_GC_COLOR_GRAY);
+    js::GCMarker *gcmarker = NULL;
+    if (IS_GC_MARKING_TRACER(trc)) {
+        gcmarker = static_cast<js::GCMarker *>(trc);
+        JS_ASSERT(gcmarker->getMarkColor() == XPC_GC_COLOR_BLACK);
+        gcmarker->setMarkColor(XPC_GC_COLOR_GRAY);
+    }
     self->TraceXPConnectRoots(trc);
-    js_SetMarkColor(trc, oldColor);
+    if (gcmarker)
+        gcmarker->setMarkColor(XPC_GC_COLOR_BLACK);
 }
 
 static void
 TraceJSObject(PRUint32 aLangID, void *aScriptThing, void *aClosure)
 {
     if(aLangID == nsIProgrammingLanguage::JAVASCRIPT)
     {
         JS_CALL_TRACER(static_cast<JSTracer*>(aClosure), aScriptThing,
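
The TraceJS change above replaces the old js_SetMarkColor save/restore with explicit setMarkColor calls on the GCMarker, applied only when the tracer really is the GC marking tracer. A hypothetical RAII wrapper, not in the patch, expressing the same black/gray switch; AutoGrayRoots and MarkerLike are invented names for illustration:

#include <cstdio>

// Color constants matching the xpcprivate.h addition below.
static const unsigned XPC_GC_COLOR_BLACK = 0;
static const unsigned XPC_GC_COLOR_GRAY  = 1;

// Stand-in for the marker interface used by TraceJS; only the color is modeled.
struct MarkerLike {
    unsigned color = XPC_GC_COLOR_BLACK;
    void setMarkColor(unsigned c) { color = c; }
    unsigned getMarkColor() const { return color; }
};

// Marks everything traced within its scope as gray, then restores black,
// mirroring the explicit calls around TraceXPConnectRoots above.
class AutoGrayRoots {
    MarkerLike *marker;
  public:
    explicit AutoGrayRoots(MarkerLike *m) : marker(m) {
        if (marker)
            marker->setMarkColor(XPC_GC_COLOR_GRAY);
    }
    ~AutoGrayRoots() {
        if (marker)
            marker->setMarkColor(XPC_GC_COLOR_BLACK);
    }
};

int main() {
    MarkerLike m;
    {
        AutoGrayRoots gray(&m);
        std::printf("tracing XPConnect roots under color %u\n", m.getMarkColor());
    }
    std::printf("restored color %u\n", m.getMarkColor());
    return 0;
}
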
--- a/js/src/xpconnect/src/xpcprivate.h
+++ b/js/src/xpconnect/src/xpcprivate.h
@@ -406,16 +406,17 @@ private:
 /***************************************************************************
 ****************************************************************************
 *
 * Core runtime and context classes...
 *
 ****************************************************************************
 ***************************************************************************/
 
+static const uint32 XPC_GC_COLOR_BLACK = 0;
 static const uint32 XPC_GC_COLOR_GRAY = 1;
 
 // We have a general rule internally that getters that return addref'd interface
 // pointer generally do so using an 'out' parm. When interface pointers are
 // returned as function call result values they are not addref'd. Exceptions
 // to this rule are noted explicitly.
 
 const PRBool OBJ_IS_GLOBAL = PR_TRUE;