Backout a6b604916694 (bug 747066) because it's broken
author Bill McCloskey <wmccloskey@mozilla.com>
Fri, 30 Nov 2012 12:20:24 -0800
changeset 123751 a4c6aedf00deb3d3683a5253c88e42acb3b22e29
parent 123750 ab440f162ab8b884c749faee7c099616af0e7678
child 123752 827260eb78e5bae0618fcb82c874c0a4cc1d5469
push id 2151
push user lsblakk@mozilla.com
push date Tue, 19 Feb 2013 18:06:57 +0000
treeherder mozilla-beta@4952e88741ec
bugs 747066
milestone 20.0a1
backs out a6b60491669479843bf270cf79cea8bd2af03f49
js/public/HeapAPI.h
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/ion/Ion.cpp
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsfriendapi.cpp
js/src/jsfriendapi.h
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsscript.h
js/src/vm/String.h
js/xpconnect/src/xpcpublic.h
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -31,126 +31,46 @@ const size_t ArenaShift = PageShift;
 const size_t PageSize = size_t(1) << PageShift;
 const size_t ArenaSize = size_t(1) << ArenaShift;
 const size_t ArenaMask = ArenaSize - 1;
 
 const size_t ChunkShift = 20;
 const size_t ChunkSize = size_t(1) << ChunkShift;
 const size_t ChunkMask = ChunkSize - 1;
 
-const size_t CellShift = 3;
-const size_t CellSize = size_t(1) << CellShift;
-const size_t CellMask = CellSize - 1;
-
-/* These are magic constants derived from actual offsets in gc/Heap.h. */
-const size_t ChunkMarkBitmapOffset = 1032376;
-const size_t ChunkMarkBitmapBits = 129024;
-
-/*
- * Live objects are marked black. How many other additional colors are available
- * depends on the size of the GCThing. Objects marked gray are eligible for
- * cycle collection.
- */
-static const uint32_t BLACK = 0;
-static const uint32_t GRAY = 1;
-
 } /* namespace gc */
 } /* namespace js */
 
 namespace JS {
+
 namespace shadow {
 
 struct ArenaHeader
 {
     JSCompartment *compartment;
 };
 
-struct Compartment
-{
-    bool needsBarrier_;
-
-    Compartment() : needsBarrier_(false) {}
-};
-
 } /* namespace shadow */
-} /* namespace JS */
-
-namespace js {
-namespace gc {
-
-static inline uintptr_t *
-GetGCThingMarkBitmap(const void *thing)
-{
-    uintptr_t addr = uintptr_t(thing);
-    addr &= ~js::gc::ChunkMask;
-    addr |= js::gc::ChunkMarkBitmapOffset;
-    return reinterpret_cast<uintptr_t *>(addr);
-}
-
-static inline void
-GetGCThingMarkWordAndMask(const void *thing, uint32_t color,
-                          uintptr_t **wordp, uintptr_t *maskp)
-{
-    uintptr_t addr = uintptr_t(thing);
-    size_t bit = (addr & js::gc::ChunkMask) / js::gc::CellSize + color;
-    JS_ASSERT(bit < js::gc::ChunkMarkBitmapBits);
-    uintptr_t *bitmap = GetGCThingMarkBitmap(thing);
-    *maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
-    *wordp = &bitmap[bit / JS_BITS_PER_WORD];
-}
 
 static inline shadow::ArenaHeader *
 GetGCThingArena(void *thing)
 {
     uintptr_t addr = uintptr_t(thing);
     addr &= ~js::gc::ArenaMask;
     return reinterpret_cast<shadow::ArenaHeader *>(addr);
 }
 
-} /* namespace gc */
-} /* namespace js */
-
-namespace JS {
-
 static inline JSCompartment *
 GetGCThingCompartment(void *thing)
 {
     JS_ASSERT(thing);
-    return js::gc::GetGCThingArena(thing)->compartment;
+    return GetGCThingArena(thing)->compartment;
 }
 
 static inline JSCompartment *
 GetObjectCompartment(JSObject *obj)
 {
     return GetGCThingCompartment(obj);
 }
 
-#if defined(XP_WIN) && JS_BITS_PER_WORD == 64
-
-/* We don't inline for Win64 because of a compiler bug. See bug 747066. */
-bool
-GCThingIsMarkedGray(void *thing);
-
-bool
-IsIncrementalBarrierNeededOnGCThing(void *thing);
-
-#else
-
-static inline bool
-GCThingIsMarkedGray(void *thing)
-{
-    uintptr_t *word, mask;
-    js::gc::GetGCThingMarkWordAndMask(thing, js::gc::GRAY, &word, &mask);
-    return *word & mask;
-}
-
-static inline bool
-IsIncrementalBarrierNeededOnGCThing(void *thing)
-{
-    JSCompartment *comp = GetGCThingCompartment(thing);
-    return reinterpret_cast<shadow::Compartment *>(comp)->needsBarrier_;
-}
-
-#endif /* #ifdef win64 */
-
 } /* namespace JS */
 
 #endif /* js_heap_api_h___ */
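
For context, the removed HeapAPI.h helpers located a cell's mark bit with pure address arithmetic: mask the pointer down to its chunk base, jump to the mark bitmap at a fixed magic offset, then index by cell position within the chunk. Below is a minimal standalone sketch of that arithmetic, using the constants from the hunk above; ChunkMarkBitmapOffset is the magic value that mirrored the Chunk layout in gc/Heap.h (and was checked there by a JS_STATIC_ASSERT this backout also removes).

// Sketch only, not part of the patch: the lookup the deleted helpers performed.
#include <stdint.h>
#include <stddef.h>

static const size_t ChunkShift = 20;
static const size_t ChunkMask = (size_t(1) << ChunkShift) - 1;
static const size_t CellSize = size_t(1) << 3;        // 8-byte GC cells
static const size_t ChunkMarkBitmapOffset = 1032376;  // magic, from gc/Heap.h
static const size_t BitsPerWord = sizeof(uintptr_t) * 8;

static inline void
MarkWordAndMask(const void *thing, uint32_t color,
                uintptr_t **wordp, uintptr_t *maskp)
{
    uintptr_t addr = uintptr_t(thing);
    // Chunks are ChunkSize-aligned, so clearing the low bits yields the chunk
    // base; the mark bitmap sits at a fixed offset inside the chunk header.
    uintptr_t *bitmap =
        reinterpret_cast<uintptr_t *>((addr & ~ChunkMask) | ChunkMarkBitmapOffset);
    // One mark bit per cell; color (BLACK = 0, GRAY = 1) selects adjacent bits.
    size_t bit = (addr & ChunkMask) / CellSize + color;
    *maskp = uintptr_t(1) << (bit % BitsPerWord);
    *wordp = &bitmap[bit / BitsPerWord];
}
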
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -29,16 +29,24 @@ namespace js {
 class FreeOp;
 
 namespace gc {
 
 struct Arena;
 struct ArenaHeader;
 struct Chunk;
 
+/*
+ * Live objects are marked black. How many other additional colors are available
+ * depends on the size of the GCThing. Objects marked gray are eligible for
+ * cycle collection.
+ */
+static const uint32_t BLACK = 0;
+static const uint32_t GRAY = 1;
+
 /* The GC allocation kinds. */
 enum AllocKind {
     FINALIZE_OBJECT0,
     FINALIZE_OBJECT0_BACKGROUND,
     FINALIZE_OBJECT2,
     FINALIZE_OBJECT2_BACKGROUND,
     FINALIZE_OBJECT4,
     FINALIZE_OBJECT4_BACKGROUND,
@@ -72,16 +80,20 @@ static const unsigned FINALIZE_OBJECT_LI
  */
 static const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
 
 /*
  * A GC cell is the base class for all GC things.
  */
 struct Cell
 {
+    static const size_t CellShift = 3;
+    static const size_t CellSize = size_t(1) << CellShift;
+    static const size_t CellMask = CellSize - 1;
+
     inline uintptr_t address() const;
     inline ArenaHeader *arenaHeader() const;
     inline Chunk *chunk() const;
     inline AllocKind getAllocKind() const;
     MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
     MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
     MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
 
@@ -99,17 +111,17 @@ struct Cell
 const static uint32_t FreeCommittedArenasThreshold = (32 << 20) / ArenaSize;
 
 /*
  * The mark bitmap has one bit per each GC cell. For multi-cell GC things this
 * wastes space but allows to avoid expensive divisions by thing's size when
  * accessing the bitmap. In addition this allows to use some bits for colored
  * marking during the cycle GC.
  */
-const size_t ArenaCellCount = size_t(1) << (ArenaShift - CellShift);
+const size_t ArenaCellCount = size_t(1) << (ArenaShift - Cell::CellShift);
 const size_t ArenaBitmapBits = ArenaCellCount;
 const size_t ArenaBitmapBytes = ArenaBitmapBits / 8;
 const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
 
 /*
  * A FreeSpan represents a contiguous sequence of free cells in an Arena.
  * |first| is the address of the first free cell in the span. |last| is the
  * address of the last free cell in the span. This last cell holds a FreeSpan
@@ -128,17 +140,17 @@ const size_t ArenaBitmapWords = ArenaBit
  * next span. So to allocate from it we need to update the span list head
  * with a copy of the span stored at |last| address so the following
  * allocations will use that span.
  *
  * |first| > |last| implies that we have an empty last span and the arena is
  * fully used.
  *
 * Also only for the last span (|last| & 1) != 0 as all allocation sizes are
- * multiples of CellSize.
+ * multiples of Cell::CellSize.
  */
 struct FreeSpan
 {
     uintptr_t   first;
     uintptr_t   last;
 
   public:
     FreeSpan() {}
@@ -245,17 +257,17 @@ struct FreeSpan
          * first == ArenaMask + 1 for an empty span.
          */
         uintptr_t arenaAddr = arenaAddress();
         return encodeOffsets(first - arenaAddr, last & ArenaMask);
     }
 
     /* See comments before FreeSpan for details. */
     MOZ_ALWAYS_INLINE void *allocate(size_t thingSize) {
-        JS_ASSERT(thingSize % CellSize == 0);
+        JS_ASSERT(thingSize % Cell::CellSize == 0);
         checkSpan();
         uintptr_t thing = first;
         if (thing < last) {
             /* Bump-allocate from the current span. */
             first = thing + thingSize;
         } else if (JS_LIKELY(thing == last)) {
             /*
              * Move to the next span. We use JS_LIKELY as without PGO
@@ -266,17 +278,17 @@ struct FreeSpan
             return NULL;
         }
         checkSpan();
         return reinterpret_cast<void *>(thing);
     }
 
     /* A version of allocate when we know that the span is not empty. */
     MOZ_ALWAYS_INLINE void *infallibleAllocate(size_t thingSize) {
-        JS_ASSERT(thingSize % CellSize == 0);
+        JS_ASSERT(thingSize % Cell::CellSize == 0);
         checkSpan();
         uintptr_t thing = first;
         if (thing < last) {
             first = thing + thingSize;
         } else {
             JS_ASSERT(thing == last);
             *this = *reinterpret_cast<FreeSpan *>(thing);
         }
@@ -314,37 +326,37 @@ struct FreeSpan
 
             if (first - 1 == last) {
                 /* The span is last and empty. The above start != 0 check
                  * implies that we are not at the end of the address space.
                  */
                 return;
             }
             size_t spanLength = last - first + 1;
-            JS_ASSERT(spanLength % CellSize == 0);
+            JS_ASSERT(spanLength % Cell::CellSize == 0);
 
             /* Start and end must belong to the same arena. */
             JS_ASSERT((first & ~ArenaMask) == arenaAddr);
             return;
         }
 
         /* The span is not the last and we have more spans to follow. */
         JS_ASSERT(first <= last);
         size_t spanLengthWithoutOneThing = last - first;
-        JS_ASSERT(spanLengthWithoutOneThing % CellSize == 0);
+        JS_ASSERT(spanLengthWithoutOneThing % Cell::CellSize == 0);
 
         JS_ASSERT((first & ~ArenaMask) == arenaAddr);
 
         /*
          * If there is not enough space before the arena end to allocate one
          * more thing, then the span must be marked as the last one to avoid
          * storing useless empty span reference.
          */
         size_t beforeTail = ArenaSize - (last & ArenaMask);
-        JS_ASSERT(beforeTail >= sizeof(FreeSpan) + CellSize);
+        JS_ASSERT(beforeTail >= sizeof(FreeSpan) + Cell::CellSize);
 
         FreeSpan *next = reinterpret_cast<FreeSpan *>(last);
 
         /*
          * The GC things on the list of free spans come from one arena
          * and the spans are linked in ascending address order with
          * at least one non-free thing between spans.
          */
@@ -532,17 +544,17 @@ struct Arena
         return ThingSizes[kind];
     }
 
     static size_t firstThingOffset(AllocKind kind) {
         return FirstThingOffsets[kind];
     }
 
     static size_t thingsPerArena(size_t thingSize) {
-        JS_ASSERT(thingSize % CellSize == 0);
+        JS_ASSERT(thingSize % Cell::CellSize == 0);
 
         /* We should be able to fit FreeSpan in any GC thing. */
         JS_ASSERT(thingSize >= sizeof(FreeSpan));
 
         return (ArenaSize - sizeof(ArenaHeader)) / thingSize;
     }
 
     static size_t thingsSpan(size_t thingSize) {
@@ -582,24 +594,16 @@ ArenaHeader::getThingSize() const
 struct ChunkInfo
 {
     Chunk           *next;
     Chunk           **prevp;
 
     /* Free arenas are linked together with aheader.next. */
     ArenaHeader     *freeArenasHead;
 
-#if JS_BITS_PER_WORD == 32
-    /*
-     * Calculating sizes and offsets is simpler if sizeof(ChunkInfo) is
-     * architecture-independent.
-     */
-    char            padding[12];
-#endif
-
     /*
      * Decommitted arenas are tracked by a bitmap in the chunk header. We use
      * this offset to start our search iteration close to a decommitted arena
      * that we can allocate.
      */
     uint32_t        lastDecommittedArenaOffset;
 
     /* Number of free arenas, either committed or decommitted. */
@@ -647,20 +651,17 @@ const size_t ChunkBytesAvailable = Chunk
 const size_t ArenasPerChunk = ChunkBytesAvailable / BytesPerArenaWithHeader;
 
 /* A chunk bitmap contains enough mark bits for all the cells in a chunk. */
 struct ChunkBitmap
 {
     uintptr_t bitmap[ArenaBitmapWords * ArenasPerChunk];
 
     MOZ_ALWAYS_INLINE void getMarkWordAndMask(const Cell *cell, uint32_t color,
-                                              uintptr_t **wordp, uintptr_t *maskp)
-    {
-        GetGCThingMarkWordAndMask(cell, color, wordp, maskp);
-    }
+                                             uintptr_t **wordp, uintptr_t *maskp);
 
     MOZ_ALWAYS_INLINE bool isMarked(const Cell *cell, uint32_t color) {
         uintptr_t *word, mask;
         getMarkWordAndMask(cell, color, &word, &mask);
         return *word & mask;
     }
 
     MOZ_ALWAYS_INLINE bool markIfUnmarked(const Cell *cell, uint32_t color) {
@@ -702,17 +703,16 @@ struct ChunkBitmap
 
         uintptr_t *word, unused;
         getMarkWordAndMask(reinterpret_cast<Cell *>(aheader->address()), BLACK, &word, &unused);
         return word;
     }
 };
 
 JS_STATIC_ASSERT(ArenaBitmapBytes * ArenasPerChunk == sizeof(ChunkBitmap));
-JS_STATIC_ASSERT(js::gc::ChunkMarkBitmapBits == ArenaBitmapBits * ArenasPerChunk);
 
 typedef BitArray<ArenasPerChunk> PerArenaBitmap;
 
 const size_t ChunkPadSize = ChunkSize
                             - (sizeof(Arena) * ArenasPerChunk)
                             - sizeof(ChunkBitmap)
                             - sizeof(PerArenaBitmap)
                             - sizeof(ChunkInfo);
@@ -805,23 +805,22 @@ struct Chunk
   public:
     /* Unlink and return the freeArenasHead. */
     inline ArenaHeader* fetchNextFreeArena(JSRuntime *rt);
 
     inline void addArenaToFreeList(JSRuntime *rt, ArenaHeader *aheader);
 };
 
 JS_STATIC_ASSERT(sizeof(Chunk) == ChunkSize);
-JS_STATIC_ASSERT(js::gc::ChunkMarkBitmapOffset == offsetof(Chunk, bitmap));
 
 inline uintptr_t
 Cell::address() const
 {
     uintptr_t addr = uintptr_t(this);
-    JS_ASSERT(addr % CellSize == 0);
+    JS_ASSERT(addr % Cell::CellSize == 0);
     JS_ASSERT(Chunk::withinArenasRange(addr));
     return addr;
 }
 
 inline uintptr_t
 ArenaHeader::address() const
 {
     uintptr_t addr = reinterpret_cast<uintptr_t>(this);
@@ -915,38 +914,48 @@ ArenaHeader::setNextAllocDuringSweep(Are
 inline void
 ArenaHeader::unsetAllocDuringSweep()
 {
     JS_ASSERT(allocatedDuringIncremental);
     allocatedDuringIncremental = 0;
     auxNextLink = 0;
 }
 
+JS_ALWAYS_INLINE void
+ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32_t color,
+                                uintptr_t **wordp, uintptr_t *maskp)
+{
+    size_t bit = (cell->address() & ChunkMask) / Cell::CellSize + color;
+    JS_ASSERT(bit < ArenaBitmapBits * ArenasPerChunk);
+    *maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
+    *wordp = &bitmap[bit / JS_BITS_PER_WORD];
+}
+
 static void
 AssertValidColor(const void *thing, uint32_t color)
 {
 #ifdef DEBUG
     ArenaHeader *aheader = reinterpret_cast<const Cell *>(thing)->arenaHeader();
-    JS_ASSERT_IF(color, color < aheader->getThingSize() / CellSize);
+    JS_ASSERT_IF(color, color < aheader->getThingSize() / Cell::CellSize);
 #endif
 }
 
 inline ArenaHeader *
 Cell::arenaHeader() const
 {
     uintptr_t addr = address();
     addr &= ~ArenaMask;
     return reinterpret_cast<ArenaHeader *>(addr);
 }
 
 Chunk *
 Cell::chunk() const
 {
     uintptr_t addr = uintptr_t(this);
-    JS_ASSERT(addr % CellSize == 0);
+    JS_ASSERT(addr % Cell::CellSize == 0);
     addr &= ~(ChunkSize - 1);
     return reinterpret_cast<Chunk *>(addr);
 }
 
 AllocKind
 Cell::getAllocKind() const
 {
     return arenaHeader()->getAllocKind();
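
As a sanity check on the restored constants, the per-arena bitmap sizing above works out as follows, assuming the usual 4 KiB arenas (ArenaShift = PageShift = 12; the value is not shown in this excerpt) and a 64-bit word. This is an illustrative sketch, not part of the patch.

#include <stddef.h>

static const size_t ArenaShift = 12;  // assumption: 4 KiB pages/arenas
static const size_t CellShift = 3;    // Cell::CellShift: 8-byte cells
static const size_t ArenaCellCount = size_t(1) << (ArenaShift - CellShift);
static const size_t ArenaBitmapBits = ArenaCellCount;  // one mark bit per cell
static const size_t ArenaBitmapBytes = ArenaBitmapBits / 8;
static const size_t ArenaBitmapWords = ArenaBitmapBits / 64;

static_assert(ArenaCellCount == 512, "512 cells per 4 KiB arena");
static_assert(ArenaBitmapBytes == 64, "64 bytes of mark bits per arena");
static_assert(ArenaBitmapWords == 8, "8 mark-bitmap words per arena on 64-bit");
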
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -1217,17 +1217,17 @@ GCMarker::restoreValueArray(JSObject *ob
 }
 
 void
 GCMarker::processMarkStackOther(SliceBudget &budget, uintptr_t tag, uintptr_t addr)
 {
     if (tag == TypeTag) {
         ScanTypeObject(this, reinterpret_cast<types::TypeObject *>(addr));
     } else if (tag == SavedValueArrayTag) {
-        JS_ASSERT(!(addr & CellMask));
+        JS_ASSERT(!(addr & Cell::CellMask));
         JSObject *obj = reinterpret_cast<JSObject *>(addr);
         HeapValue *vp, *end;
         if (restoreValueArray(obj, (void **)&vp, (void **)&end))
             pushValueArray(obj, vp, end);
         else
             pushObject(obj);
     } else if (tag == IonCodeTag) {
         MarkChildren(this, reinterpret_cast<ion::IonCode *>(addr));
@@ -1282,17 +1282,17 @@ GCMarker::processMarkStackTop(SliceBudge
     JSObject *obj;
 
     uintptr_t addr = stack.pop();
     uintptr_t tag = addr & StackTagMask;
     addr &= ~StackTagMask;
 
     if (tag == ValueArrayTag) {
         JS_STATIC_ASSERT(ValueArrayTag == 0);
-        JS_ASSERT(!(addr & CellMask));
+        JS_ASSERT(!(addr & Cell::CellMask));
         obj = reinterpret_cast<JSObject *>(addr);
         uintptr_t addr2 = stack.pop();
         uintptr_t addr3 = stack.pop();
         JS_ASSERT(addr2 <= addr3);
         JS_ASSERT((addr3 - addr2) % sizeof(Value) == 0);
         vp = reinterpret_cast<HeapSlot *>(addr2);
         end = reinterpret_cast<HeapSlot *>(addr3);
         goto scan_value_array;
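
The Cell::CellMask assertions in these hunks are what make the mark stack's pointer tagging safe: every pushed GC thing is at least 8-byte aligned, so the low three bits of its address are free to carry a tag (ValueArrayTag, SavedValueArrayTag, IonCodeTag, ...). A minimal sketch of the tag/untag round trip, with hypothetical helper names:

#include <stdint.h>
#include <assert.h>

static const uintptr_t StackTagMask = 7;  // low 3 bits; must stay <= CellMask

// Hypothetical helper: fold a small tag into a cell-aligned pointer.
static inline uintptr_t
TagThing(void *thing, uintptr_t tag)
{
    uintptr_t addr = uintptr_t(thing);
    assert(!(addr & StackTagMask));  // cell alignment keeps these bits clear
    assert(tag <= StackTagMask);
    return addr | tag;
}

// Hypothetical helper: the inverse, as processMarkStackTop does after pop().
static inline void
UntagThing(uintptr_t word, void **thingp, uintptr_t *tagp)
{
    *tagp = word & StackTagMask;
    *thingp = reinterpret_cast<void *>(word & ~StackTagMask);
}
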
--- a/js/src/ion/Ion.cpp
+++ b/js/src/ion/Ion.cpp
@@ -46,17 +46,17 @@
 
 using namespace js;
 using namespace js::ion;
 
 // Global variables.
 IonOptions ion::js_IonOptions;
 
 // Assert that IonCode is gc::Cell aligned.
-JS_STATIC_ASSERT(sizeof(IonCode) % gc::CellSize == 0);
+JS_STATIC_ASSERT(sizeof(IonCode) % gc::Cell::CellSize == 0);
 
 #ifdef JS_THREADSAFE
 static bool IonTLSInitialized = false;
 static unsigned IonTLSIndex;
 
 static inline IonContext *
 CurrentIonContext()
 {
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -45,16 +45,17 @@ using mozilla::DebugOnly;
 
 JSCompartment::JSCompartment(JSRuntime *rt)
   : rt(rt),
     principals(NULL),
     global_(NULL),
 #ifdef JSGC_GENERATIONAL
     gcStoreBuffer(&gcNursery),
 #endif
+    needsBarrier_(false),
     ionUsingBarriers_(false),
     gcScheduled(false),
     gcState(NoGC),
     gcPreserveCode(false),
     gcBytes(0),
     gcTriggerBytes(0),
     gcHeapGrowthFactor(3.0),
     hold(false),
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -113,17 +113,17 @@ namespace JS {
 struct TypeInferenceSizes;
 }
 
 namespace js {
 class AutoDebugModeGC;
 struct DebugScopes;
 }
 
-struct JSCompartment : private JS::shadow::Compartment, public js::gc::GraphNodeBase
+struct JSCompartment : public js::gc::GraphNodeBase
 {
     JSRuntime                    *rt;
     JSPrincipals                 *principals;
 
   private:
     friend struct JSRuntime;
     friend struct JSContext;
     js::GlobalObject             *global_;
@@ -153,16 +153,17 @@ struct JSCompartment : private JS::shado
     js::gc::ArenaLists           arenas;
 
 #ifdef JSGC_GENERATIONAL
     js::gc::Nursery              gcNursery;
     js::gc::StoreBuffer          gcStoreBuffer;
 #endif
 
   private:
+    bool                         needsBarrier_;
     bool                         ionUsingBarriers_;
   public:
 
     bool needsBarrier() const {
         return needsBarrier_;
     }
 
     bool compileBarriers(bool needsBarrier) const {
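
The shadow-struct machinery deleted by this backout relied on layout mirroring: JS::shadow::Compartment declared the same leading field as JSCompartment, so friend code could reinterpret_cast a JSCompartment* and read needsBarrier_ without the full class definition. A minimal sketch of that pattern, with simplified names:

// Sketch of the removed shadow-struct pattern (not part of the patch).
namespace shadow {
struct Compartment {
    bool needsBarrier_;
    Compartment() : needsBarrier_(false) {}
};
} // namespace shadow

struct RealCompartment : private shadow::Compartment {
    // ... the many fields the embedder never sees ...
    bool needsBarrier() const { return needsBarrier_; }
};

static inline bool
NeedsBarrierViaShadow(RealCompartment *comp)
{
    // Exactly the cast the removed inline helpers performed; it is sound only
    // because the shadow struct is the first (base-class) subobject.
    return reinterpret_cast<shadow::Compartment *>(comp)->needsBarrier_;
}
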
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -552,16 +552,23 @@ JS_IsDeadWrapper(JSObject *obj)
 
 void
 js::TraceWeakMaps(WeakMapTracer *trc)
 {
     WeakMapBase::traceAllMappings(trc);
     WatchpointMap::traceAll(trc);
 }
 
+JS_FRIEND_API(bool)
+js::GCThingIsMarkedGray(void *thing)
+{
+    JS_ASSERT(thing);
+    return reinterpret_cast<gc::Cell *>(thing)->isMarked(gc::GRAY);
+}
+
 JS_FRIEND_API(JSGCTraceKind)
 js::GCThingTraceKind(void *thing)
 {
     JS_ASSERT(thing);
     return gc::GetGCThingTraceKind(thing);
 }
 
 JS_FRIEND_API(void)
@@ -881,16 +888,28 @@ js::IsIncrementalBarrierNeeded(JSRuntime
 }
 
 JS_FRIEND_API(bool)
 js::IsIncrementalBarrierNeeded(JSContext *cx)
 {
     return IsIncrementalBarrierNeeded(cx->runtime);
 }
 
+JS_FRIEND_API(bool)
+js::IsIncrementalBarrierNeededOnObject(RawObject obj)
+{
+    return obj->compartment()->needsBarrier();
+}
+
+JS_FRIEND_API(bool)
+js::IsIncrementalBarrierNeededOnScript(JSScript *script)
+{
+    return script->compartment()->needsBarrier();
+}
+
 JS_FRIEND_API(void)
 js::IncrementalReferenceBarrier(void *ptr)
 {
     if (!ptr)
         return;
 
     gc::Cell *cell = static_cast<gc::Cell *>(ptr);
     JS_ASSERT(!cell->compartment()->rt->isHeapBusy());
--- a/js/src/jsfriendapi.h
+++ b/js/src/jsfriendapi.h
@@ -261,16 +261,19 @@ struct WeakMapTracer {
 
     WeakMapTracer(JSRuntime *rt, WeakMapTraceCallback cb)
         : runtime(rt), callback(cb) {}
 };
 
 extern JS_FRIEND_API(void)
 TraceWeakMaps(WeakMapTracer *trc);
 
+extern JS_FRIEND_API(bool)
+GCThingIsMarkedGray(void *thing);
+
 JS_FRIEND_API(void)
 UnmarkGrayGCThing(void *thing);
 
 typedef void
 (GCThingCallback)(void *closure, void *gcthing);
 
 extern JS_FRIEND_API(void)
 VisitGrayWrapperTargets(JSCompartment *comp, GCThingCallback *callback, void *closure);
@@ -877,16 +880,22 @@ extern JS_FRIEND_API(void)
 DisableIncrementalGC(JSRuntime *rt);
 
 extern JS_FRIEND_API(bool)
 IsIncrementalBarrierNeeded(JSRuntime *rt);
 
 extern JS_FRIEND_API(bool)
 IsIncrementalBarrierNeeded(JSContext *cx);
 
+extern JS_FRIEND_API(bool)
+IsIncrementalBarrierNeededOnObject(RawObject obj);
+
+extern JS_FRIEND_API(bool)
+IsIncrementalBarrierNeededOnScript(JSScript *obj);
+
 extern JS_FRIEND_API(void)
 IncrementalReferenceBarrier(void *ptr);
 
 extern JS_FRIEND_API(void)
 IncrementalValueBarrier(const Value &v);
 
 extern JS_FRIEND_API(void)
 PokeGC(JSRuntime *rt);
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -299,17 +299,17 @@ Arena::staticAsserts()
     JS_STATIC_ASSERT(JS_ARRAY_LENGTH(FirstThingOffsets) == FINALIZE_LIMIT);
 }
 
 template<typename T>
 inline bool
 Arena::finalize(FreeOp *fop, AllocKind thingKind, size_t thingSize)
 {
     /* Enforce requirements on size of T. */
-    JS_ASSERT(thingSize % CellSize == 0);
+    JS_ASSERT(thingSize % Cell::CellSize == 0);
     JS_ASSERT(thingSize <= 255);
 
     JS_ASSERT(aheader.allocated());
     JS_ASSERT(thingKind == aheader.getAllocKind());
     JS_ASSERT(thingSize == aheader.getThingSize());
     JS_ASSERT(!aheader.hasDelayedMarking);
     JS_ASSERT(!aheader.markOverflow);
     JS_ASSERT(!aheader.allocatedDuringIncremental);
@@ -4515,29 +4515,8 @@ JSXML *
 js_NewGCXML(JSContext *cx)
 {
     if (!cx->runningWithTrustedPrincipals())
         ++sE4XObjectsCreated;
 
     return NewGCThing<JSXML>(cx, js::gc::FINALIZE_XML, sizeof(JSXML));
 }
 #endif
-
-#if defined(XP_WIN) && JS_BITS_PER_WORD == 64
-namespace JS {
-
-bool
-GCThingIsMarkedGray(void *thing)
-{
-    uintptr_t *word, mask;
-    js::gc::GetGCThingMarkWordAndMask(thing, js::gc::GRAY, &word, &mask);
-    return *word & mask;
-}
-
-bool
-IsIncrementalBarrierNeededOnGCThing(void *thing)
-{
-    JSCompartment *comp = GetGCThingCompartment(thing);
-    return reinterpret_cast<shadow::Compartment *>(comp)->needsBarrier_;
-}
-
-} /* namespace JS */
-#endif /* #ifdef win64 */
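
The block deleted from jsgc.cpp was the other half of the Win64 workaround removed from HeapAPI.h above: on 64-bit Windows the two helpers were only declared in the header and defined out of line here, sidestepping an MSVC codegen bug (bug 747066); everywhere else they were inline. In sketch form (quoting the removed code, with MSVC_WORKAROUND standing in for the real XP_WIN && JS_BITS_PER_WORD == 64 test):

// Header side of the pattern this backout retires.
#ifdef MSVC_WORKAROUND
// Declaration only; the definition lives in a single .cpp so the buggy
// compiler never has to inline it.
bool GCThingIsMarkedGray(void *thing);
#else
// Everywhere else: fully inline in the header.
static inline bool
GCThingIsMarkedGray(void *thing)
{
    uintptr_t *word, mask;
    js::gc::GetGCThingMarkWordAndMask(thing, js::gc::GRAY, &word, &mask);
    return *word & mask;
}
#endif
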
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -914,17 +914,17 @@ struct GCMarker : public JSTracer {
         IonCodeTag,
         LastTag = IonCodeTag
     };
 
     static const uintptr_t StackTagMask = 7;
 
     static void staticAsserts() {
         JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag));
-        JS_STATIC_ASSERT(StackTagMask <= gc::CellMask);
+        JS_STATIC_ASSERT(StackTagMask <= gc::Cell::CellMask);
     }
 
   public:
     explicit GCMarker();
     bool init();
 
     void setSizeLimit(size_t size) { stack.setSizeLimit(size); }
     size_t sizeLimit() const { return stack.sizeLimit; }
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -923,17 +923,17 @@ struct JSScript : public js::gc::Cell
     }
 
     void markChildren(JSTracer *trc);
 };
 
 JS_STATIC_ASSERT(sizeof(JSScript::ArrayBitsT) * 8 >= JSScript::LIMIT);
 
 /* If this fails, add/remove padding within JSScript. */
-JS_STATIC_ASSERT(sizeof(JSScript) % js::gc::CellSize == 0);
+JS_STATIC_ASSERT(sizeof(JSScript) % js::gc::Cell::CellSize == 0);
 
 namespace js {
 
 /*
  * Iterator over a script's bindings (formals and variables).
  * The order of iteration is:
  *  - first, formal arguments, from index 0 to numArgs
  *  - next, variables, from index 0 to numVars
--- a/js/src/vm/String.h
+++ b/js/src/vm/String.h
@@ -648,21 +648,21 @@ class JSInlineString : public JSFlatStri
         return length <= MAX_INLINE_LENGTH;
     }
 };
 
 JS_STATIC_ASSERT(sizeof(JSInlineString) == sizeof(JSString));
 
 class JSShortString : public JSInlineString
 {
-    /* This can be any value that is a multiple of CellSize. */
+    /* This can be any value that is a multiple of Cell::CellSize. */
     static const size_t INLINE_EXTENSION_CHARS = sizeof(JSString::Data) / sizeof(jschar);
 
     static void staticAsserts() {
-        JS_STATIC_ASSERT(INLINE_EXTENSION_CHARS % js::gc::CellSize == 0);
+        JS_STATIC_ASSERT(INLINE_EXTENSION_CHARS % js::gc::Cell::CellSize == 0);
         JS_STATIC_ASSERT(MAX_SHORT_LENGTH + 1 ==
                          (sizeof(JSShortString) -
                           offsetof(JSShortString, d.inlineStorage)) / sizeof(jschar));
     }
 
   protected: /* to fool clang into not warning this is unused */
     jschar inlineStorageExtension[INLINE_EXTENSION_CHARS];
 
--- a/js/xpconnect/src/xpcpublic.h
+++ b/js/xpconnect/src/xpcpublic.h
@@ -123,17 +123,17 @@ xpc_FastGetCachedWrapper(nsWrapperCache 
 }
 
 // The JS GC marks objects gray that are held alive directly or
 // indirectly by an XPConnect root. The cycle collector explores only
 // this subset of the JS heap.
 inline JSBool
 xpc_IsGrayGCThing(void *thing)
 {
-    return JS::GCThingIsMarkedGray(thing);
+    return js::GCThingIsMarkedGray(thing);
 }
 
 // The cycle collector only cares about some kinds of GCthings that are
 // reachable from an XPConnect root. Implemented in nsXPConnect.cpp.
 extern JSBool
 xpc_GCThingIsGrayCCThing(void *thing);
 
 // Implemented in nsXPConnect.cpp.
@@ -141,17 +141,17 @@ extern void
 xpc_UnmarkGrayGCThingRecursive(void *thing, JSGCTraceKind kind);
 
 // Unmark gray for known-nonnull cases
 MOZ_ALWAYS_INLINE void
 xpc_UnmarkNonNullGrayObject(JSObject *obj)
 {
     if (xpc_IsGrayGCThing(obj))
         xpc_UnmarkGrayGCThingRecursive(obj, JSTRACE_OBJECT);
-    else if (JS::IsIncrementalBarrierNeededOnGCThing(obj))
+    else if (js::IsIncrementalBarrierNeededOnObject(obj))
         js::IncrementalReferenceBarrier(obj);
 }
 
 // Remove the gray color from the given JSObject and any other objects that can
 // be reached through it.
 MOZ_ALWAYS_INLINE JSObject *
 xpc_UnmarkGrayObject(JSObject *obj)
 {
@@ -161,17 +161,17 @@ xpc_UnmarkGrayObject(JSObject *obj)
 }
 
 inline JSScript *
 xpc_UnmarkGrayScript(JSScript *script)
 {
     if (script) {
         if (xpc_IsGrayGCThing(script))
             xpc_UnmarkGrayGCThingRecursive(script, JSTRACE_SCRIPT);
-        else if (JS::IsIncrementalBarrierNeededOnGCThing(script))
+        else if (js::IsIncrementalBarrierNeededOnScript(script))
             js::IncrementalReferenceBarrier(script);
     }
     return script;
 }
 
 inline JSContext *
 xpc_UnmarkGrayContext(JSContext *cx)
 {
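
All of the xpc_UnmarkGray* helpers above make the same decision: if the thing is marked gray, clear the gray mark transitively before JS uses it; otherwise, if an incremental GC is mid-slice, fire the reference barrier so the collector sees the escaping edge. A condensed, hypothetical sketch of that flow (the two callees stand in for xpc_UnmarkGrayGCThingRecursive and js::IncrementalReferenceBarrier):

void UnmarkGrayRecursive(void *thing);  // assumed: clears gray bits transitively
void ReferenceBarrier(void *thing);     // assumed: incremental GC pre-barrier

void
ExposeToActiveJS(void *thing, bool isGray, bool barrierNeeded)
{
    if (!thing)
        return;
    if (isGray) {
        // Gray means "possibly garbage" to the cycle collector; un-gray the
        // whole subgraph before handing the thing back to running JS.
        UnmarkGrayRecursive(thing);
    } else if (barrierNeeded) {
        // Nothing to un-gray, but an incremental GC is active: record the
        // reference so marking stays correct.
        ReferenceBarrier(thing);
    }
}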