Bug 776583 - Make post barrier verifier pass interpreter tests; r=billm
author Terrence Cole <terrence@mozilla.com>
Mon, 16 Jul 2012 10:54:56 -0700
changeset 103322 258f0a5ed7f552678ee55dd15cf2cae62b1ad402
parent 103321 544b0cbdbb094dad18947f2994a3709d95621a96
child 103323 1bdc56a180bc2b8c588b4982103c3a7e6b6baa3e
push id 13943
push user tcole@mozilla.com
push date Fri, 24 Aug 2012 20:12:05 +0000
reviewers billm
bugs 776583
milestone 17.0a1
js/public/HashTable.h
js/src/gc/Barrier-inl.h
js/src/gc/Barrier.h
js/src/gc/Marking.cpp
js/src/gc/Marking.h
js/src/gc/StoreBuffer.h
js/src/jsapi.cpp
js/src/jscompartment.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsinfer.cpp
js/src/jsproxy.cpp
js/src/jsscope.cpp
js/src/jsscope.h
js/src/jsscopeinlines.h
js/src/jstypedarray.cpp
js/src/jstypedarrayinlines.h
js/src/jswatchpoint.cpp
js/src/jswatchpoint.h
js/src/jsweakmap.cpp
js/src/jsweakmap.h
js/src/jsxml.cpp
js/src/vm/Debugger.cpp
js/src/vm/Debugger.h
js/src/vm/ObjectImpl-inl.h
js/src/vm/ObjectImpl.cpp
js/src/vm/ObjectImpl.h
js/src/vm/ScopeObject.cpp
js/src/vm/ScopeObject.h
js/src/vm/String-inl.h
js/src/vm/String.cpp
--- a/js/public/HashTable.h
+++ b/js/public/HashTable.h
@@ -209,19 +209,16 @@ class HashTable : private AllocPolicy
         }
 
         /*
          * Removes the |front()| element and re-inserts it into the table with
          * a new key at the new Lookup position.  |front()| is invalid after
          * this operation until the next call to |popFront()|.
          */
         void rekeyFront(const Lookup &l, const Key &k) {
-            JS_ASSERT(&k != &HashPolicy::getKey(this->cur->t));
-            if (match(*this->cur, l))
-                return;
             typename HashTableEntry<T>::NonConstT t = this->cur->t;
             HashPolicy::setKey(t, const_cast<Key &>(k));
             table.remove(*this->cur);
             table.putNewInfallible(l, t);
             rekeyed = true;
             this->validEntry = false;
         }
 
@@ -991,16 +988,19 @@ struct IsPodType<HashMapEntry<K, V> >
  * N.B: Due to the lack of exception handling, the user must call |init()|.
  */
 template <class Key,
           class Value,
           class HashPolicy = DefaultHasher<Key>,
           class AllocPolicy = TempAllocPolicy>
 class HashMap
 {
+    typedef typename tl::StaticAssert<tl::IsRelocatableHeapType<Key>::result>::result keyAssert;
+    typedef typename tl::StaticAssert<tl::IsRelocatableHeapType<Value>::result>::result valAssert;
+
   public:
     typedef typename HashPolicy::Lookup Lookup;
 
     typedef HashMapEntry<Key, Value> Entry;
 
   private:
     /* Implement HashMap using HashTable. Lift |Key| operations to |Entry|. */
     struct MapHashPolicy : HashPolicy
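
rekeyFront() no longer bails out when the new lookup already matches the current entry; that responsibility moves to the callers, which rekey only after observing that a GC actually moved the key (see the jscompartment.cpp, jsinfer.cpp, and jswatchpoint.cpp hunks below). A minimal sketch of the caller-side discipline this patch establishes, with hypothetical Map/Key names and IsKeyMarked standing in for the type-specific Is*Marked helpers:

    for (Map::Enum e(map); !e.empty(); e.popFront()) {
        Key key = e.front().key;       /* copy; Is*Marked applies forwarding */
        if (!IsKeyMarked(&key))
            e.removeFront();           /* key is dead: drop the entry */
        else if (key != e.front().key)
            e.rekeyFront(key);         /* key moved: remove and re-insert */
        /* otherwise the key did not move and the table is left untouched */
    }
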
--- a/js/src/gc/Barrier-inl.h
+++ b/js/src/gc/Barrier-inl.h
@@ -11,16 +11,35 @@
 #include "gc/Barrier.h"
 #include "gc/Marking.h"
 
 #include "vm/ObjectImpl-inl.h"
 #include "vm/String-inl.h"
 
 namespace js {
 
+template <typename T>
+inline void
+RelocatablePtr<T>::post()
+{
+#ifdef JSGC_GENERATIONAL
+    JS_ASSERT(this->value);
+    this->value->compartment()->gcStoreBuffer.putRelocatableCell((gc::Cell **)&this->value);
+#endif
+}
+
+template <typename T>
+inline void
+RelocatablePtr<T>::relocate(JSCompartment *comp)
+{
+#ifdef JSGC_GENERATIONAL
+    comp->gcStoreBuffer.removeRelocatableCell((gc::Cell **)&this->value);
+#endif
+}
+
 inline void
 EncapsulatedValue::writeBarrierPre(const Value &value)
 {
 #ifdef JSGC_INCREMENTAL
     if (value.isMarkable()) {
         js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
         writeBarrierPre(cell->compartment(), value);
     }
@@ -132,23 +151,29 @@ HeapValue::set(JSCompartment *comp, cons
     value = v;
     post(comp);
 }
 
 inline void
 HeapValue::writeBarrierPost(const Value &value, Value *addr)
 {
 #ifdef JSGC_GENERATIONAL
+    if (value.isMarkable()) {
+        js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
+        cell->compartment()->gcStoreBuffer.putValue(addr);
+    }
 #endif
 }
 
 inline void
 HeapValue::writeBarrierPost(JSCompartment *comp, const Value &value, Value *addr)
 {
 #ifdef JSGC_GENERATIONAL
+    if (value.isMarkable())
+        comp->gcStoreBuffer.putValue(addr);
 #endif
 }
 
 inline void
 HeapValue::post()
 {
     writeBarrierPost(value, &value);
 }
@@ -207,30 +232,40 @@ RelocatableValue::operator=(const Reloca
     post();
     return *this;
 }
 
 inline void
 RelocatableValue::post()
 {
 #ifdef JSGC_GENERATIONAL
+    if (value.isMarkable()) {
+        js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
+        cell->compartment()->gcStoreBuffer.putRelocatableValue(&value);
+    }
 #endif
 }
 
 inline void
 RelocatableValue::post(JSCompartment *comp)
 {
 #ifdef JSGC_GENERATIONAL
+    if (value.isMarkable())
+        comp->gcStoreBuffer.putRelocatableValue(&value);
 #endif
 }
 
 inline void
 RelocatableValue::relocate()
 {
 #ifdef JSGC_GENERATIONAL
+    if (value.isMarkable()) {
+        js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
+        cell->compartment()->gcStoreBuffer.removeRelocatableValue(&value);
+    }
 #endif
 }
 
 inline
 HeapSlot::HeapSlot(JSObject *obj, uint32_t slot, const Value &v)
     : EncapsulatedValue(v)
 {
     JS_ASSERT(!IsPoisonedValue(v));
@@ -288,35 +323,114 @@ HeapSlot::set(JSCompartment *comp, JSObj
     JS_ASSERT(!IsPoisonedValue(v));
     value = v;
     post(comp, obj, slot);
 }
 
 inline void
 HeapSlot::writeBarrierPost(JSObject *obj, uint32_t slot)
 {
+#ifdef JSGC_GENERATIONAL
+    obj->compartment()->gcStoreBuffer.putSlot(obj, slot);
+#endif
 }
 
 inline void
 HeapSlot::writeBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t slot)
 {
+#ifdef JSGC_GENERATIONAL
+    comp->gcStoreBuffer.putSlot(obj, slot);
+#endif
 }
 
 inline void
 HeapSlot::post(JSObject *owner, uint32_t slot)
 {
     HeapSlot::writeBarrierPost(owner, slot);
 }
 
 inline void
 HeapSlot::post(JSCompartment *comp, JSObject *owner, uint32_t slot)
 {
     HeapSlot::writeBarrierPost(comp, owner, slot);
 }
 
+#ifdef JSGC_GENERATIONAL
+class SlotRangeRef : public gc::BufferableRef
+{
+    JSObject *owner;
+    uint32_t start;
+    uint32_t end;
+
+  public:
+    SlotRangeRef(JSObject *obj, uint32_t start, uint32_t end)
+      : owner(obj), start(start), end(end)
+    {
+        JS_ASSERT(start < end);
+    }
+
+    bool match(void *location) {
+        if (owner->isDenseArray()) {
+            uint32_t len = owner->getDenseArrayInitializedLength();
+            for (uint32_t i = Min(start, len); i < Min(end, len); ++i) {
+                if (&owner->getDenseArrayElement(i) == location)
+                    return true;
+            }
+            return false;
+        }
+        uint32_t span = owner->slotSpan();
+        for (uint32_t i = Min(start, span); i < Min(end, span); ++i) {
+            if (owner->getSlotAddress(i) == location)
+                return true;
+        }
+        return false;
+    }
+
+    void mark(JSTracer *trc) {
+        /* Apply forwarding, if we have already visited owner. */
+        IsObjectMarked(&owner);
+        if (owner->isDenseArray()) {
+            uint32_t initLen = owner->getDenseArrayInitializedLength();
+            uint32_t clampedStart = Min(start, initLen);
+            gc::MarkArraySlots(trc, Min(end, initLen) - clampedStart,
+                               owner->getDenseArrayElements() + clampedStart, "element");
+            return;
+        }
+        uint32_t span = owner->slotSpan();
+        uint32_t clampedStart = Min(start, span);
+        MarkObjectSlots(trc, owner, clampedStart, Min(end, span) - clampedStart);
+    }
+};
+#endif
+
+inline void
+SlotRangeWriteBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t start, uint32_t count)
+{
+#ifdef JSGC_GENERATIONAL
+    if (count > 0)
+        comp->gcStoreBuffer.putGeneric(SlotRangeRef(obj, start, start + count));
+#endif
+}
+
+inline
+EncapsulatedId::~EncapsulatedId()
+{
+    pre();
+}
+
+inline EncapsulatedId &
+EncapsulatedId::operator=(const EncapsulatedId &v)
+{
+    if (v.value != value)
+        pre();
+    JS_ASSERT(!IsPoisonedId(v.value));
+    value = v.value;
+    return *this;
+}
+
 inline void
 EncapsulatedId::pre()
 {
 #ifdef JSGC_INCREMENTAL
     if (JSID_IS_OBJECT(value)) {
         JSObject *obj = JSID_TO_OBJECT(value);
         JSCompartment *comp = obj->compartment();
         if (comp->needsBarrier()) {
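
SlotRangeRef records a single store-buffer entry for a whole span of slot writes and clamps against the object's live length only at mark time, since the object may have shrunk in between. A sketch of the bulk-write pattern SlotRangeWriteBarrierPost is built for; the surrounding helper and the init-style write that skips per-slot barriers are hypothetical:

    static void
    BulkInitSlots(JSCompartment *comp, JSObject *obj, uint32_t start,
                  const Value *src, uint32_t count)
    {
        HeapSlot *dst = obj->getSlotAddress(start);
        for (uint32_t i = 0; i < count; ++i)
            dst[i].init(obj, start + i, src[i]);   /* no per-slot post barrier */
        /* One buffered range entry instead of |count| slot entries. */
        SlotRangeWriteBarrierPost(comp, obj, start, count);
    }
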
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -124,17 +124,17 @@ class EncapsulatedPtr
   protected:
     union {
         T *value;
         Unioned other;
     };
 
   public:
     EncapsulatedPtr() : value(NULL) {}
-    explicit EncapsulatedPtr(T *v) : value(v) {}
+    EncapsulatedPtr(T *v) : value(v) {}
     explicit EncapsulatedPtr(const EncapsulatedPtr<T> &v) : value(v.value) {}
 
     ~EncapsulatedPtr() { pre(); }
 
     /* Use to set the pointer to NULL. */
     void clear() {
         pre();
         value = NULL;
@@ -217,44 +217,61 @@ class HeapPtr : public EncapsulatedPtr<T
                      HeapPtr<T2> &v2, T2 *val2);
 };
 
 template <class T>
 class RelocatablePtr : public EncapsulatedPtr<T>
 {
   public:
     RelocatablePtr() : EncapsulatedPtr<T>(NULL) {}
-    explicit RelocatablePtr(T *v) : EncapsulatedPtr<T>(v) { post(); }
-    explicit RelocatablePtr(const RelocatablePtr<T> &v)
-      : EncapsulatedPtr<T>(v) { post(); }
+    explicit RelocatablePtr(T *v) : EncapsulatedPtr<T>(v) {
+        if (v)
+            post();
+    }
+    explicit RelocatablePtr(const RelocatablePtr<T> &v) : EncapsulatedPtr<T>(v) {
+        if (this->value)
+            post();
+    }
 
     ~RelocatablePtr() {
-        this->pre();
-        relocate();
+        if (this->value)
+            relocate(this->value->compartment());
     }
 
     RelocatablePtr<T> &operator=(T *v) {
         this->pre();
         JS_ASSERT(!IsPoisonedPtr<T>(v));
-        this->value = v;
-        post();
+        if (v) {
+            this->value = v;
+            post();
+        } else if (this->value) {
+            JSCompartment *comp = this->value->compartment();
+            this->value = v;
+            relocate(comp);
+        }
         return *this;
     }
 
     RelocatablePtr<T> &operator=(const RelocatablePtr<T> &v) {
         this->pre();
         JS_ASSERT(!IsPoisonedPtr<T>(v.value));
-        this->value = v.value;
-        post();
+        if (v.value) {
+            this->value = v.value;
+            post();
+        } else if (this->value) {
+            JSCompartment *comp = this->value->compartment();
+            this->value = v;
+            relocate(comp);
+        }
         return *this;
     }
 
   protected:
-    void post() { T::writeBarrierRelocPost(this->value, (void *)&this->value); }
-    void relocate() { T::writeBarrierRelocated(this->value, (void *)&this->value); }
+    inline void post();
+    inline void relocate(JSCompartment *comp);
 };
 
 /*
  * This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
  * barriers with only one branch to check if we're in an incremental GC.
  */
 template<class T1, class T2>
 static inline void
@@ -271,16 +288,19 @@ BarrieredSetPair(JSCompartment *comp,
     v1.post();
     v2.post();
 }
 
 struct Shape;
 class BaseShape;
 namespace types { struct TypeObject; }
 
+typedef EncapsulatedPtr<JSObject> EncapsulatedPtrObject;
+typedef EncapsulatedPtr<JSScript> EncapsulatedPtrScript;
+
 typedef RelocatablePtr<JSObject> RelocatablePtrObject;
 typedef RelocatablePtr<JSScript> RelocatablePtrScript;
 
 typedef HeapPtr<JSObject> HeapPtrObject;
 typedef HeapPtr<JSFunction> HeapPtrFunction;
 typedef HeapPtr<JSString> HeapPtrString;
 typedef HeapPtr<JSScript> HeapPtrScript;
 typedef HeapPtr<Shape> HeapPtrShape;
@@ -298,16 +318,29 @@ struct HeapPtrHasher
     static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
     static bool match(const Key &k, Lookup l) { return k.get() == l; }
 };
 
 /* Specialized hashing policy for HeapPtrs. */
 template <class T>
 struct DefaultHasher< HeapPtr<T> > : HeapPtrHasher<T> { };
 
+template<class T>
+struct EncapsulatedPtrHasher
+{
+    typedef EncapsulatedPtr<T> Key;
+    typedef T *Lookup;
+
+    static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
+    static bool match(const Key &k, Lookup l) { return k.get() == l; }
+};
+
+template <class T>
+struct DefaultHasher< EncapsulatedPtr<T> > : EncapsulatedPtrHasher<T> { };
+
 class EncapsulatedValue : public ValueOperations<EncapsulatedValue>
 {
   protected:
     Value value;
 
     /*
      * Ensure that EncapsulatedValue is not constructable, except by our
      * implementations.
@@ -423,18 +456,29 @@ class HeapSlot : public EncapsulatedValu
 
 /*
  * NOTE: This is a placeholder for bug 619558.
  *
  * Run a post write barrier that encompasses multiple contiguous slots in a
  * single step.
  */
 inline void
-SlotRangeWriteBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t start, uint32_t count)
+SlotRangeWriteBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t start, uint32_t count);
+
+/*
+ * This is a post barrier for HashTables whose key can be moved during a GC.
+ */
+template <class Map, class Key>
+inline void
+HashTableWriteBarrierPost(JSCompartment *comp, const Map *map, const Key &key)
 {
+#ifdef JSGC_GENERATIONAL
+    if (key && comp->gcNursery.isInside(key))
+        comp->gcStoreBuffer.putGeneric(HashKeyRef(map, key));
+#endif
 }
 
 static inline const Value *
 Valueify(const EncapsulatedValue *array)
 {
     JS_STATIC_ASSERT(sizeof(HeapValue) == sizeof(Value));
     JS_STATIC_ASSERT(sizeof(HeapSlot) == sizeof(Value));
     return (const Value *)array;
@@ -462,25 +506,26 @@ class HeapSlotArray
     HeapSlotArray operator +(uint32_t offset) const { return HeapSlotArray(array + offset); }
 };
 
 class EncapsulatedId
 {
   protected:
     jsid value;
 
-    explicit EncapsulatedId() : value(JSID_VOID) {}
-    explicit inline EncapsulatedId(jsid id) : value(id) {}
-    ~EncapsulatedId() {}
-
   private:
     EncapsulatedId(const EncapsulatedId &v) MOZ_DELETE;
-    EncapsulatedId &operator=(const EncapsulatedId &v) MOZ_DELETE;
 
   public:
+    explicit EncapsulatedId() : value(JSID_VOID) {}
+    explicit EncapsulatedId(jsid id) : value(id) {}
+    ~EncapsulatedId();
+
+    inline EncapsulatedId &operator=(const EncapsulatedId &v);
+
     bool operator==(jsid id) const { return value == id; }
     bool operator!=(jsid id) const { return value != id; }
 
     jsid get() const { return value; }
     jsid *unsafeGet() { return &value; }
     operator jsid() const { return value; }
 
   protected:
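
The RelocatablePtr changes above maintain a simple invariant: the pointer's address is registered in the store buffer exactly while it holds a non-NULL value. Assuming JSGC_GENERATIONAL and two live objects obj and other, the intended lifecycle looks like this:

    {
        RelocatablePtrObject p(obj);  /* post(): &p.value enters the buffer */
        p = other;                    /* pre() barrier, then post() again */
        p = NULL;                     /* relocate(): entry removed; note the
                                         compartment is captured before the
                                         value is cleared */
    }                                 /* dtor: value is NULL, nothing to do */
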
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -134,17 +134,17 @@ static void
 MarkUnbarriered(JSTracer *trc, T **thingp, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkInternal(trc, thingp);
 }
 
 template <typename T>
 static void
-Mark(JSTracer *trc, HeapPtr<T> *thing, const char *name)
+Mark(JSTracer *trc, EncapsulatedPtr<T> *thing, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkInternal(trc, thing->unsafeGet());
 }
 
 template <typename T>
 static void
 MarkRoot(JSTracer *trc, T **thingp, const char *name)
@@ -187,17 +187,17 @@ IsMarked(T **thingp)
     JS_ASSERT(*thingp);
     if (!(*thingp)->compartment()->isCollecting())
         return true;
     return (*thingp)->isMarked();
 }
 
 #define DeclMarkerImpl(base, type)                                                                \
 void                                                                                              \
-Mark##base(JSTracer *trc, HeapPtr<type> *thing, const char *name)                                 \
+Mark##base(JSTracer *trc, EncapsulatedPtr<type> *thing, const char *name)                         \
 {                                                                                                 \
     Mark<type>(trc, thing, name);                                                                 \
 }                                                                                                 \
                                                                                                   \
 void                                                                                              \
 Mark##base##Root(JSTracer *trc, type **thingp, const char *name)                                  \
 {                                                                                                 \
     MarkRoot<type>(trc, thingp, name);                                                            \
@@ -219,17 +219,17 @@ void Mark##base##RootRange(JSTracer *trc
     MarkRootRange<type>(trc, len, vec, name);                                                     \
 }                                                                                                 \
                                                                                                   \
 bool Is##base##Marked(type **thingp)                                                              \
 {                                                                                                 \
     return IsMarked<type>(thingp);                                                                \
 }                                                                                                 \
                                                                                                   \
-bool Is##base##Marked(HeapPtr<type> *thingp)                                                      \
+bool Is##base##Marked(EncapsulatedPtr<type> *thingp)                                              \
 {                                                                                                 \
     return IsMarked<type>(thingp->unsafeGet());                                                   \
 }
 
 DeclMarkerImpl(BaseShape, BaseShape)
 DeclMarkerImpl(BaseShape, UnownedBaseShape)
 DeclMarkerImpl(Object, ArgumentsObject)
 DeclMarkerImpl(Object, DebugScopeObject)
@@ -295,25 +295,29 @@ MarkGCThingRoot(JSTracer *trc, void **th
     MarkKind(trc, thingp, GetGCThingTraceKind(*thingp));
 }
 
 /*** ID Marking ***/
 
 static inline void
 MarkIdInternal(JSTracer *trc, jsid *id)
 {
-    JS_SET_TRACING_LOCATION(trc, (void *)id);
     if (JSID_IS_STRING(*id)) {
         JSString *str = JSID_TO_STRING(*id);
+        JS_SET_TRACING_LOCATION(trc, (void *)id);
         MarkInternal(trc, &str);
         *id = NON_INTEGER_ATOM_TO_JSID(reinterpret_cast<JSAtom *>(str));
     } else if (JS_UNLIKELY(JSID_IS_OBJECT(*id))) {
         JSObject *obj = JSID_TO_OBJECT(*id);
+        JS_SET_TRACING_LOCATION(trc, (void *)id);
         MarkInternal(trc, &obj);
         *id = OBJECT_TO_JSID(obj);
+    } else {
+        /* Unset realLocation manually if we do not call MarkInternal. */
+        JS_SET_TRACING_LOCATION(trc, NULL);
     }
 }
 
 void
 MarkId(JSTracer *trc, EncapsulatedId *id, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkIdInternal(trc, id->unsafeGet());
@@ -363,16 +367,17 @@ MarkValueInternal(JSTracer *trc, Value *
         void *thing = v->toGCThing();
         JS_SET_TRACING_LOCATION(trc, (void *)v);
         MarkKind(trc, &thing, v->gcKind());
         if (v->isString())
             v->setString((JSString *)thing);
         else
             v->setObjectOrNull((JSObject *)thing);
     } else {
+        /* Unset realLocation manually if we do not call MarkInternal. */
         JS_SET_TRACING_LOCATION(trc, NULL);
     }
 }
 
 void
 MarkValue(JSTracer *trc, EncapsulatedValue *v, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
@@ -610,17 +615,17 @@ PushMarkStack(GCMarker *gcmarker, BaseSh
 }
 
 static void
 ScanShape(GCMarker *gcmarker, Shape *shape)
 {
   restart:
     PushMarkStack(gcmarker, shape->base());
 
-    const HeapId &id = shape->propidRef();
+    const EncapsulatedId &id = shape->propidRef();
     if (JSID_IS_STRING(id))
         PushMarkStack(gcmarker, JSID_TO_STRING(id));
     else if (JS_UNLIKELY(JSID_IS_OBJECT(id)))
         PushMarkStack(gcmarker, JSID_TO_OBJECT(id));
 
     shape = shape->previous();
     if (shape && shape->markIfUnmarked(gcmarker->getMarkColor()))
         goto restart;
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -58,23 +58,23 @@ namespace gc {
  *     forgiving, since it doesn't demand a HeapPtr as an argument. Its use
  *     should always be accompanied by a comment explaining how write barriers
  *     are implemented for the given field.
  *
  * Additionally, the functions MarkObjectRange and MarkObjectRootRange are
  * defined for marking arrays of object pointers.
  */
 #define DeclMarker(base, type)                                                                    \
-void Mark##base(JSTracer *trc, HeapPtr<type> *thing, const char *name);                           \
+void Mark##base(JSTracer *trc, EncapsulatedPtr<type> *thing, const char *name);                   \
 void Mark##base##Root(JSTracer *trc, type **thingp, const char *name);                            \
 void Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name);                     \
 void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *thing, const char *name);        \
 void Mark##base##RootRange(JSTracer *trc, size_t len, type **thing, const char *name);            \
 bool Is##base##Marked(type **thingp);                                                             \
-bool Is##base##Marked(HeapPtr<type> *thingp);
+bool Is##base##Marked(EncapsulatedPtr<type> *thingp);
 
 DeclMarker(BaseShape, BaseShape)
 DeclMarker(BaseShape, UnownedBaseShape)
 DeclMarker(Object, ArgumentsObject)
 DeclMarker(Object, DebugScopeObject)
 DeclMarker(Object, GlobalObject)
 DeclMarker(Object, JSObject)
 DeclMarker(Object, JSFunction)
@@ -214,29 +214,29 @@ PushArena(GCMarker *gcmarker, ArenaHeade
 /*** Generic ***/
 
 /*
  * The Mark() functions interface should only be used by code that must be
  * templated.  Other uses should use the more specific, type-named functions.
  */
 
 inline void
-Mark(JSTracer *trc, HeapValue *v, const char *name)
+Mark(JSTracer *trc, EncapsulatedValue *v, const char *name)
 {
     MarkValue(trc, v, name);
 }
 
 inline void
-Mark(JSTracer *trc, HeapPtr<JSObject> *o, const char *name)
+Mark(JSTracer *trc, EncapsulatedPtrObject *o, const char *name)
 {
     MarkObject(trc, o, name);
 }
 
 inline void
-Mark(JSTracer *trc, HeapPtr<JSScript> *o, const char *name)
+Mark(JSTracer *trc, EncapsulatedPtrScript *o, const char *name)
 {
     MarkScript(trc, o, name);
 }
 
 #if JS_HAS_XML_SUPPORT
 inline void
 Mark(JSTracer *trc, HeapPtr<JSXML> *xml, const char *name)
 {
@@ -251,23 +251,23 @@ inline bool
 IsMarked(EncapsulatedValue *v)
 {
     if (!v->isMarkable())
         return true;
     return IsValueMarked(v->unsafeGet());
 }
 
 inline bool
-IsMarked(HeapPtrObject *objp)
+IsMarked(EncapsulatedPtrObject *objp)
 {
     return IsObjectMarked(objp);
 }
 
 inline bool
-IsMarked(HeapPtrScript *scriptp)
+IsMarked(EncapsulatedPtrScript *scriptp)
 {
     return IsScriptMarked(scriptp);
 }
 
 inline Cell *
 ToMarkable(const Value &v)
 {
     if (v.isMarkable())
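
Loosening these signatures from HeapPtr to EncapsulatedPtr is source-compatible: HeapPtr<T> derives from EncapsulatedPtr<T>, so existing call sites still bind through the derived-to-base pointer conversion, while fields that carry only a pre-barrier (such as the new WatchKey members below) become markable too. In outline:

    HeapPtrObject strong;           /* HeapPtr<T> : public EncapsulatedPtr<T> */
    EncapsulatedPtrObject plain;
    Mark(trc, &strong, "strong");   /* still compiles via base conversion */
    Mark(trc, &plain, "plain");     /* newly possible with this patch */
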
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -36,16 +36,17 @@ class Nursery
 
     void disable() {
         if (!nursery.initialized())
             return;
         nursery.finish();
     }
 
     bool isInside(void *cell) const {
+        JS_ASSERT((uintptr_t(cell) & 0x3) == 0);
         return nursery.initialized() && nursery.has(cell);
     }
 
     void insertPointer(void *cell) {
         nursery.putNew(cell);
     }
 };
 
@@ -138,17 +139,17 @@ class StoreBuffer
 
         /* Compaction algorithms. */
         void compactNotInSet();
 
         /*
          * Attempts to reduce the usage of the buffer by removing unnecessary
          * entries.
          */
-        void compact();
+        virtual void compact();
 
         /* Add one item to the buffer. */
         void put(const T &v);
 
         /* For verification. */
         bool accumulateEdges(EdgeSet &edges);
     };
 
@@ -162,17 +163,17 @@ class StoreBuffer
         friend class StoreBuffer;
 
         RelocatableMonoTypeBuffer(StoreBuffer *owner, Nursery *nursery)
           : MonoTypeBuffer<T>(owner, nursery)
         {}
 
         /* Override compaction to filter out removed items. */
         void compactMoved();
-        void compact();
+        virtual void compact();
 
         /* Record a removal from the buffer. */
         void unput(const T &v);
     };
 
     class GenericBuffer
     {
         friend class StoreBuffer;
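
compact() becomes virtual so that RelocatableMonoTypeBuffer's filtering override runs even when compaction is reached through a MonoTypeBuffer<T> pointer or a base-class method; otherwise unput() tombstones would survive compaction. A sketch of the dispatch this buys, with Edge as a placeholder element type:

    void
    CompactThroughBase(StoreBuffer::MonoTypeBuffer<Edge> *buf)
    {
        /* Virtual dispatch reaches RelocatableMonoTypeBuffer<Edge>::compact(),
           which presumably filters removed items via compactMoved(). */
        buf->compact();
    }
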
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -4583,49 +4583,40 @@ static Class prop_iter_class = {
     NULL,           /* hasInstance */
     prop_iter_trace
 };
 
 JS_PUBLIC_API(JSObject *)
 JS_NewPropertyIterator(JSContext *cx, JSObject *objArg)
 {
     RootedObject obj(cx, objArg);
-    JSObject *iterobj;
-    void *pdata;
-    int index;
-    JSIdArray *ida;
 
     AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
     assertSameCompartment(cx, obj);
-    iterobj = NewObjectWithClassProto(cx, &prop_iter_class, NULL, obj);
-    AssertRootingUnnecessary safe(cx);
+
+    RootedObject iterobj(cx, NewObjectWithClassProto(cx, &prop_iter_class, NULL, obj));
     if (!iterobj)
         return NULL;
 
+    int index;
     if (obj->isNative()) {
         /* Native case: start with the last property in obj. */
-        pdata = (void *)obj->lastProperty();
+        iterobj->setPrivateGCThing(obj->lastProperty());
         index = -1;
     } else {
-        /*
-         * Non-native case: enumerate a JSIdArray and keep it via private.
-         *
-         * Note: we have to make sure that we root obj around the call to
-         * JS_Enumerate to protect against multiple allocations under it.
-         */
-        ida = JS_Enumerate(cx, obj);
+        /* Non-native case: enumerate a JSIdArray and keep it via private. */
+        JSIdArray *ida = JS_Enumerate(cx, obj);
         if (!ida)
             return NULL;
-        pdata = ida;
+        iterobj->setPrivate((void *)ida);
         index = ida->length;
     }
 
     /* iterobj cannot escape to other threads here. */
-    iterobj->setPrivate(pdata);
     iterobj->setSlot(JSSLOT_ITER_INDEX, Int32Value(index));
     return iterobj;
 }
 
 JS_PUBLIC_API(JSBool)
 JS_NextProperty(JSContext *cx, JSObject *iterobjArg, jsid *idp)
 {
     RootedObject iterobj(cx, iterobjArg);
@@ -4645,17 +4636,17 @@ JS_NextProperty(JSContext *cx, JSObject 
 
         while (shape->previous() && !shape->enumerable())
             shape = shape->previous();
 
         if (!shape->previous()) {
             JS_ASSERT(shape->isEmptyShape());
             *idp = JSID_VOID;
         } else {
-            iterobj->setPrivate(const_cast<Shape *>(shape->previous().get()));
+            iterobj->setPrivateGCThing(const_cast<Shape *>(shape->previous().get()));
             *idp = shape->propid();
         }
     } else {
         /* Non-native case: use the ida enumerated when iterobj was created. */
         ida = (JSIdArray *) iterobj->getPrivate();
         JS_ASSERT(i <= ida->length);
         STATIC_ASSUME(i <= ida->length);
         if (i == 0) {
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -601,17 +601,17 @@ JSCompartment::sweepCrossCompartmentWrap
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         CrossCompartmentKey key = e.front().key;
         bool keyMarked = IsCellMarked(&key.wrapped);
         bool valMarked = IsValueMarked(e.front().value.unsafeGet());
         bool dbgMarked = !key.debugger || IsObjectMarked(&key.debugger);
         JS_ASSERT_IF(!keyMarked && valMarked, key.kind == CrossCompartmentKey::StringWrapper);
         if (!keyMarked || !valMarked || !dbgMarked)
             e.removeFront();
-        else
+        else if (key.wrapped != e.front().key.wrapped || key.debugger != e.front().key.debugger)
             e.rekeyFront(key);
     }
 }
 
 void
 JSCompartment::purge()
 {
     dtoaCache.purge();
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1243,16 +1243,17 @@ RecordNativeStackTopForGC(JSRuntime *rt)
     cgcd->recordStackTop();
 }
 
 } /* namespace js */
 
 bool
 js_IsAddressableGCThing(JSRuntime *rt, uintptr_t w, gc::AllocKind *thingKind, void **thing)
 {
+    rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
     return js::IsAddressableGCThing(rt, w, false, thingKind, NULL, thing) == CGCT_VALID;
 }
 
 #ifdef DEBUG
 static void
 CheckLeakedRoots(JSRuntime *rt);
 #endif
 
@@ -5404,16 +5405,18 @@ EndVerifyPostBarriers(JSRuntime *rt)
     VerifyPostTracer *trc = (VerifyPostTracer *)rt->gcVerifyPostData;
     JS_TracerInit(trc, rt, PostVerifierVisitEdge);
     trc->count = 0;
 
     if (!rt->gcExactScanningEnabled)
         goto oom;
 
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        if (c->gcStoreBuffer.hasOverflowed())
+            continue;
         if (!c->gcStoreBuffer.coalesceForVerification())
             goto oom;
     }
 
     /* Walk the heap. */
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         if (!c->gcStoreBuffer.isEnabled() ||
              c->gcStoreBuffer.hasOverflowed() ||
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -661,20 +661,20 @@ class GCHelperThread {
     void finish();
 
     /* Must be called with the GC lock taken. */
     void startBackgroundSweep(bool shouldShrink);
 
     /* Must be called with the GC lock taken. */
     void startBackgroundShrink();
 
-    /* Must be called with the GC lock taken. */
+    /* Must be called without the GC lock taken. */
     void waitBackgroundSweepEnd();
 
-    /* Must be called with the GC lock taken. */
+    /* Must be called without the GC lock taken. */
     void waitBackgroundSweepOrAllocEnd();
 
     /* Must be called with the GC lock taken. */
     inline void startBackgroundAllocationIfIdle();
 
     bool canBackgroundAllocate() const {
         return backgroundAllocation;
     }
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -5951,17 +5951,17 @@ TypeCompartment::sweep(FreeOp *fop)
 
     if (allocationSiteTable) {
         for (AllocationSiteTable::Enum e(*allocationSiteTable); !e.empty(); e.popFront()) {
             AllocationSiteKey key = e.front().key;
             bool keyMarked = IsScriptMarked(&key.script);
             bool valMarked = IsTypeObjectMarked(e.front().value.unsafeGet());
             if (!keyMarked || !valMarked)
                 e.removeFront();
-            else
+            else if (key.script != e.front().key.script)
                 e.rekeyFront(key);
         }
     }
 
     /*
      * The pending array is reset on GC, it can grow large (75+ KB) and is easy
      * to reallocate if the compartment becomes active again.
      */
--- a/js/src/jsproxy.cpp
+++ b/js/src/jsproxy.cpp
@@ -1677,17 +1677,17 @@ proxy_TraceObject(JSTracer *trc, JSObjec
         JSObject *referent = &GetProxyPrivate(obj).toObject();
         if (referent->compartment() != obj->compartment()) {
             /*
              * Assert that this proxy is tracked in the wrapper map. We maintain
              * the invariant that the wrapped object is the key in the wrapper map.
              */
             Value key = ObjectValue(*referent);
             WrapperMap::Ptr p = obj->compartment()->crossCompartmentWrappers.lookup(key);
-            JS_ASSERT(p->value.get() == ObjectValue(*obj));
+            JS_ASSERT(*p->value.unsafeGet() == ObjectValue(*obj));
         }
     }
 #endif
 
     // NB: If you add new slots here, make sure to change
     // js::NukeChromeCrossCompartmentWrappers to cope.
     MarkCrossCompartmentSlot(trc, &obj->getReservedSlotRef(JSSLOT_PROXY_PRIVATE), "private");
     MarkSlot(trc, &obj->getReservedSlotRef(JSSLOT_PROXY_EXTRA + 0), "extra0");
@@ -1941,22 +1941,22 @@ js::NewProxyObject(JSContext *cx, BasePr
      * their prototype changes later.
      */
     if (proto && !proto->setNewTypeUnknown(cx))
         return NULL;
 
     RootedObject obj(cx, NewObjectWithGivenProto(cx, clasp, proto, parent));
     if (!obj)
         return NULL;
-    obj->setSlot(JSSLOT_PROXY_HANDLER, PrivateValue(handler));
-    obj->setSlot(JSSLOT_PROXY_PRIVATE, priv);
+    obj->initSlot(JSSLOT_PROXY_HANDLER, PrivateValue(handler));
+    obj->initCrossCompartmentSlot(JSSLOT_PROXY_PRIVATE, priv);
     if (fun) {
-        obj->setSlot(JSSLOT_PROXY_CALL, call ? ObjectValue(*call) : UndefinedValue());
+        obj->initCrossCompartmentSlot(JSSLOT_PROXY_CALL, call ? ObjectValue(*call) : UndefinedValue());
         if (construct) {
-            obj->setSlot(JSSLOT_PROXY_CONSTRUCT, ObjectValue(*construct));
+            obj->initSlot(JSSLOT_PROXY_CONSTRUCT, ObjectValue(*construct));
         }
     }
 
     /* Don't track types of properties of proxies. */
     MarkTypeObjectUnknownProperties(cx, obj->type());
 
     /* Mark the new proxy as having singleton type. */
     if (clasp == &OuterWindowProxyClass && !JSObject::setSingletonType(cx, obj))
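
The switch from setSlot to the init variants encodes write-barrier semantics: a slot written for the first time has no previous value, so the pre-barrier that setSlot performs is unnecessary, and the cross-compartment variant additionally tolerates a value whose compartment differs from the object's. In outline (a sketch of the intent, not the full JSObject API):

    obj->initSlot(slot, v);                  /* first write: no pre-barrier */
    obj->initCrossCompartmentSlot(slot, v);  /* first write; v may belong to
                                                another compartment */
    obj->setSlot(slot, v);                   /* overwrite: pre + post barriers */
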
--- a/js/src/jsscope.cpp
+++ b/js/src/jsscope.cpp
@@ -1303,18 +1303,20 @@ JSCompartment::sweepInitialShapeTable()
             if (!IsShapeMarked(&shape) || (proto && !IsObjectMarked(&proto))) {
                 e.removeFront();
             } else {
 #ifdef DEBUG
                 JSObject *parent = shape->getObjectParent();
                 JS_ASSERT(!parent || IsObjectMarked(&parent));
                 JS_ASSERT(parent == shape->getObjectParent());
 #endif
-                InitialShapeEntry newKey(shape, proto);
-                e.rekeyFront(newKey.getLookup(), newKey);
+                if (shape != entry.shape || proto != entry.proto) {
+                    InitialShapeEntry newKey(shape, proto);
+                    e.rekeyFront(newKey.getLookup(), newKey);
+                }
             }
         }
     }
 }
 
 /*
  * Property lookup hooks on non-native objects are required to return a non-NULL
  * shape to signify that the property has been found. The actual shape returned
--- a/js/src/jsscope.h
+++ b/js/src/jsscope.h
@@ -453,17 +453,17 @@ struct Shape : public js::gc::Cell
     friend class js::ObjectImpl;
     friend class js::PropertyTree;
     friend class js::StaticBlockObject;
     friend struct js::StackShape;
     friend struct js::StackBaseShape;
 
   protected:
     HeapPtrBaseShape    base_;
-    HeapId              propid_;
+    EncapsulatedId      propid_;
 
     JS_ENUM_HEADER(SlotInfo, uint32_t)
     {
         /* Number of fixed slots in objects with this shape. */
         FIXED_SLOTS_MAX        = 0x1f,
         FIXED_SLOTS_SHIFT      = 27,
         FIXED_SLOTS_MASK       = uint32_t(FIXED_SLOTS_MAX << FIXED_SLOTS_SHIFT),
 
@@ -739,22 +739,22 @@ struct Shape : public js::gc::Cell
 
     void incrementNumLinearSearches() {
         uint32_t count = numLinearSearches();
         JS_ASSERT(count < LINEAR_SEARCHES_MAX);
         slotInfo = slotInfo & ~LINEAR_SEARCHES_MASK;
         slotInfo = slotInfo | ((count + 1) << LINEAR_SEARCHES_SHIFT);
     }
 
-    const HeapId &propid() const {
+    const EncapsulatedId &propid() const {
         JS_ASSERT(!isEmptyShape());
         JS_ASSERT(!JSID_IS_VOID(propid_));
         return propid_;
     }
-    HeapId &propidRef() { JS_ASSERT(!JSID_IS_VOID(propid_)); return propid_; }
+    EncapsulatedId &propidRef() { JS_ASSERT(!JSID_IS_VOID(propid_)); return propid_; }
 
     int16_t shortid() const { JS_ASSERT(hasShortID()); return maybeShortid(); }
     int16_t maybeShortid() const { return shortid_; }
 
     /*
      * If SHORTID is set in shape->flags, we use shape->shortid rather
      * than id when calling shape's getter or setter.
      */
--- a/js/src/jsscopeinlines.h
+++ b/js/src/jsscopeinlines.h
@@ -26,16 +26,32 @@
 #include "jscntxtinlines.h"
 #include "jsgcinlines.h"
 #include "jsobjinlines.h"
 
 #include "vm/ScopeObject-inl.h"
 
 namespace js {
 
+static inline void
+GetterSetterWriteBarrierPost(JSCompartment *comp, JSObject **objp)
+{
+#ifdef JSGC_GENERATIONAL
+    comp->gcStoreBuffer.putRelocatableCell(reinterpret_cast<gc::Cell **>(objp));
+#endif
+}
+
+static inline void
+GetterSetterWriteBarrierPostRemove(JSCompartment *comp, JSObject **objp)
+{
+#ifdef JSGC_GENERATIONAL
+    comp->gcStoreBuffer.removeRelocatableCell(reinterpret_cast<gc::Cell **>(objp));
+#endif
+}
+
 inline
 BaseShape::BaseShape(Class *clasp, JSObject *parent, uint32_t objectFlags)
 {
     JS_ASSERT(!(objectFlags & ~OBJECT_FLAG_MASK));
     PodZero(this);
     this->clasp = clasp;
     this->parent = parent;
     this->flags = objectFlags;
@@ -49,59 +65,61 @@ BaseShape::BaseShape(Class *clasp, JSObj
     PodZero(this);
     this->clasp = clasp;
     this->parent = parent;
     this->flags = objectFlags;
     this->rawGetter = rawGetter;
     this->rawSetter = rawSetter;
     if ((attrs & JSPROP_GETTER) && rawGetter) {
         this->flags |= HAS_GETTER_OBJECT;
-        JSObject::writeBarrierPost(this->getterObj, &this->getterObj);
+        GetterSetterWriteBarrierPost(compartment(), &this->getterObj);
     }
     if ((attrs & JSPROP_SETTER) && rawSetter) {
         this->flags |= HAS_SETTER_OBJECT;
-        JSObject::writeBarrierPost(this->setterObj, &this->setterObj);
+        GetterSetterWriteBarrierPost(compartment(), &this->setterObj);
     }
 }
 
 inline
 BaseShape::BaseShape(const StackBaseShape &base)
 {
     PodZero(this);
     this->clasp = base.clasp;
     this->parent = base.parent;
     this->flags = base.flags;
     this->rawGetter = base.rawGetter;
     this->rawSetter = base.rawSetter;
     if ((base.flags & HAS_GETTER_OBJECT) && base.rawGetter) {
-        JSObject::writeBarrierPost(this->getterObj, &this->getterObj);
+        GetterSetterWriteBarrierPost(compartment(), &this->getterObj);
     }
     if ((base.flags & HAS_SETTER_OBJECT) && base.rawSetter) {
-        JSObject::writeBarrierPost(this->setterObj, &this->setterObj);
+        GetterSetterWriteBarrierPost(compartment(), &this->setterObj);
     }
 }
 
 inline BaseShape &
 BaseShape::operator=(const BaseShape &other)
 {
     clasp = other.clasp;
     parent = other.parent;
     flags = other.flags;
     slotSpan_ = other.slotSpan_;
     if (flags & HAS_GETTER_OBJECT) {
         getterObj = other.getterObj;
-        JSObject::writeBarrierPost(getterObj, &getterObj);
+        GetterSetterWriteBarrierPost(compartment(), &getterObj);
     } else {
         rawGetter = other.rawGetter;
+        GetterSetterWriteBarrierPostRemove(compartment(), &getterObj);
     }
     if (flags & HAS_SETTER_OBJECT) {
         setterObj = other.setterObj;
-        JSObject::writeBarrierPost(setterObj, &setterObj);
+        GetterSetterWriteBarrierPost(compartment(), &setterObj);
     } else {
         rawSetter = other.rawSetter;
+        GetterSetterWriteBarrierPostRemove(compartment(), &setterObj);
     }
     return *this;
 }
 
 inline bool
 BaseShape::matchesGetterSetter(PropertyOp rawGetter, StrictPropertyOp rawSetter) const
 {
     return rawGetter == this->rawGetter && rawSetter == this->rawSetter;
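
The new remove calls matter because BaseShape keeps each accessor in a union of a raw PropertyOp and a JSObject pointer, roughly:

    union {
        PropertyOp  rawGetter;    /* raw C function pointer */
        JSObject   *getterObj;    /* GC pointer, post-barriered by hand */
    };

When operator= switches a slot from an object getter to a raw function pointer, a stale relocatable-cell entry would let the store buffer treat the function pointer as a movable GC thing, so GetterSetterWriteBarrierPostRemove deregisters the address whenever a raw pointer is stored.
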
--- a/js/src/jstypedarray.cpp
+++ b/js/src/jstypedarray.cpp
@@ -322,16 +322,17 @@ void
 ArrayBufferObject::obj_trace(JSTracer *trc, JSObject *obj)
 {
     /*
      * If this object changes, it will get marked via the private data barrier,
      * so it's safe to leave it Unbarriered.
      */
     JSObject *delegate = static_cast<JSObject*>(obj->getPrivate());
     if (delegate) {
+        JS_SET_TRACING_LOCATION(trc, &obj->privateRef(obj->numFixedSlots()));
         MarkObjectUnbarriered(trc, &delegate, "arraybuffer.delegate");
         obj->setPrivateUnbarriered(delegate);
     }
 }
 
 JSBool
 ArrayBufferObject::obj_lookupGeneric(JSContext *cx, HandleObject obj, HandleId id,
                                      MutableHandleObject objp, MutableHandleShape propp)
@@ -1268,23 +1269,17 @@ class TypedArrayTemplate
         }
 
         obj->setSlot(FIELD_TYPE, Int32Value(ArrayTypeID()));
         obj->setSlot(FIELD_BUFFER, ObjectValue(*bufobj));
 
         JS_ASSERT(bufobj->isArrayBuffer());
         Rooted<ArrayBufferObject *> buffer(cx, &bufobj->asArrayBuffer());
 
-        /*
-         * N.B. The base of the array's data is stored in the object's
-         * private data rather than a slot, to avoid alignment restrictions
-         * on private Values.
-         */
-        obj->setPrivate(buffer->dataPointer() + byteOffset);
-
+        InitTypedArrayDataPointer(obj, buffer, byteOffset);
         obj->setSlot(FIELD_LENGTH, Int32Value(len));
         obj->setSlot(FIELD_BYTEOFFSET, Int32Value(byteOffset));
         obj->setSlot(FIELD_BYTELENGTH, Int32Value(len * sizeof(NativeType)));
 
         JS_ASSERT(obj->getClass() == protoClass());
 
         js::Shape *empty = EmptyShape::getInitialShape(cx, fastClass(),
                                                        obj->getProto(), obj->getParent(),
--- a/js/src/jstypedarrayinlines.h
+++ b/js/src/jstypedarrayinlines.h
@@ -152,16 +152,53 @@ TypedArray::slotWidth(JSObject *obj) {
 }
 
 bool
 DataViewObject::is(const Value &v)
 {
     return v.isObject() && v.toObject().hasClass(&DataViewClass);
 }
 
+#ifdef JSGC_GENERATIONAL
+class TypedArrayPrivateRef : public gc::BufferableRef
+{
+    JSObject *obj;
+    ArrayBufferObject *buffer;
+    size_t byteOffset;
+
+  public:
+    TypedArrayPrivateRef(JSObject *obj, ArrayBufferObject *buffer, size_t byteOffset)
+      : obj(obj), buffer(buffer), byteOffset(byteOffset) {}
+
+    bool match(void *location) {
+        // The private field of obj is not traced, but needs to be updated by mark.
+        return false;
+    }
+
+    void mark(JSTracer *trc) {}
+};
+#endif
+
+static inline void
+InitTypedArrayDataPointer(JSObject *obj, ArrayBufferObject *buffer, size_t byteOffset)
+{
+    /*
+     * N.B. The base of the array's data is stored in the object's
+     * private data rather than a slot to avoid alignment restrictions
+     * on private Values.
+     */
+    obj->initPrivate(buffer->dataPointer() + byteOffset);
+#ifdef JSGC_GENERATIONAL
+    JSCompartment *comp = obj->compartment();
+    JS_ASSERT(comp == buffer->compartment());
+    if (comp->gcNursery.isInside(buffer))
+        comp->gcStoreBuffer.putGeneric(TypedArrayPrivateRef(obj, buffer, byteOffset));
+#endif
+}
+
 inline DataViewObject *
 DataViewObject::create(JSContext *cx, uint32_t byteOffset, uint32_t byteLength,
                        Handle<ArrayBufferObject*> arrayBuffer, JSObject *protoArg)
 {
     JS_ASSERT(byteOffset <= INT32_MAX);
     JS_ASSERT(byteLength <= INT32_MAX);
 
     RootedObject proto(cx, protoArg);
@@ -189,17 +226,17 @@ DataViewObject::create(JSContext *cx, ui
     }
 
     JS_ASSERT(arrayBuffer->isArrayBuffer());
 
     DataViewObject &dvobj = obj->asDataView();
     dvobj.setFixedSlot(BYTEOFFSET_SLOT, Int32Value(byteOffset));
     dvobj.setFixedSlot(BYTELENGTH_SLOT, Int32Value(byteLength));
     dvobj.setFixedSlot(BUFFER_SLOT, ObjectValue(*arrayBuffer));
-    dvobj.setPrivate(arrayBuffer->dataPointer() + byteOffset);
+    InitTypedArrayDataPointer(obj, arrayBuffer, byteOffset);
     JS_ASSERT(byteOffset + byteLength <= arrayBuffer->byteLength());
 
     JS_ASSERT(dvobj.numFixedSlots() == RESERVED_SLOTS);
 
     return &dvobj;
 }
 
 inline uint32_t
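
A typed array's private field is an interior pointer into its buffer's data, which no ordinary cell barrier can express, so TypedArrayPrivateRef parks the (object, buffer, byteOffset) triple in the generic buffer. Its mark() is still a stub above; a sketch of the fix-up it exists to perform once buffers can move out of the nursery, hypothetical and built only from helpers visible elsewhere in this patch:

    void mark(JSTracer *trc) {
        IsObjectMarked(&obj);     /* forward obj, as SlotRangeRef::mark does */
        JSObject *buf = buffer;
        IsObjectMarked(&buf);     /* forward the buffer if it moved */
        buffer = &buf->asArrayBuffer();
        /* Recompute the interior pointer against the relocated data. */
        obj->setPrivateUnbarriered(buffer->dataPointer() + byteOffset);
    }
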
--- a/js/src/jswatchpoint.cpp
+++ b/js/src/jswatchpoint.cpp
@@ -44,16 +44,32 @@ class AutoEntryHolder {
 };
 
 bool
 WatchpointMap::init()
 {
     return map.init();
 }
 
+static void
+WatchpointWriteBarrierPost(JSCompartment *comp, WatchpointMap::Map *map, const WatchKey &key,
+                           const Watchpoint &val)
+{
+#ifdef JSGC_GENERATIONAL
+    if ((JSID_IS_OBJECT(key.id) && comp->gcNursery.isInside(JSID_TO_OBJECT(key.id))) ||
+        (JSID_IS_STRING(key.id) && comp->gcNursery.isInside(JSID_TO_STRING(key.id))) ||
+        comp->gcNursery.isInside(key.object) ||
+        comp->gcNursery.isInside(val.closure))
+    {
+        typedef HashKeyRef<WatchpointMap::Map, WatchKey> WatchKeyRef;
+        comp->gcStoreBuffer.putGeneric(WatchKeyRef(map, key));
+    }
+#endif
+}
+
 bool
 WatchpointMap::watch(JSContext *cx, HandleObject obj, HandleId id,
                      JSWatchPointHandler handler, HandleObject closure)
 {
     JS_ASSERT(JSID_IS_STRING(id) || JSID_IS_INT(id));
 
     if (!obj->setWatched(cx))
         return false;
@@ -61,16 +77,17 @@ WatchpointMap::watch(JSContext *cx, Hand
     Watchpoint w;
     w.handler = handler;
     w.closure = closure;
     w.held = false;
     if (!map.put(WatchKey(obj, id), w)) {
         js_ReportOutOfMemory(cx);
         return false;
     }
+    WatchpointWriteBarrierPost(obj->compartment(), &map, WatchKey(obj, id), w);
     return true;
 }
 
 void
 WatchpointMap::unwatch(JSObject *obj, jsid id,
                        JSWatchPointHandler *handlerp, JSObject **closurep)
 {
     if (Map::Ptr p = map.lookup(WatchKey(obj, id))) {
@@ -138,56 +155,58 @@ WatchpointMap::markAllIteratively(JSTrac
 }
 
 bool
 WatchpointMap::markIteratively(JSTracer *trc)
 {
     bool marked = false;
     for (Map::Enum e(map); !e.empty(); e.popFront()) {
         Map::Entry &entry = e.front();
-        JSObject *keyObj = entry.key.object;
-        jsid keyId(entry.key.id.get());
-        bool objectIsLive = IsObjectMarked(&keyObj);
+        JSObject *priorKeyObj = entry.key.object;
+        jsid priorKeyId(entry.key.id.get());
+        bool objectIsLive = IsObjectMarked(const_cast<EncapsulatedPtrObject *>(&entry.key.object));
         if (objectIsLive || entry.value.held) {
             if (!objectIsLive) {
-                MarkObjectUnbarriered(trc, &keyObj, "held Watchpoint object");
+                MarkObject(trc, const_cast<EncapsulatedPtrObject *>(&entry.key.object),
+                           "held Watchpoint object");
                 marked = true;
             }
 
-            JS_ASSERT(JSID_IS_STRING(keyId) || JSID_IS_INT(keyId));
-            MarkIdUnbarriered(trc, &keyId, "WatchKey::id");
+            JS_ASSERT(JSID_IS_STRING(priorKeyId) || JSID_IS_INT(priorKeyId));
+            MarkId(trc, const_cast<EncapsulatedId *>(&entry.key.id), "WatchKey::id");
 
             if (entry.value.closure && !IsObjectMarked(&entry.value.closure)) {
                 MarkObject(trc, &entry.value.closure, "Watchpoint::closure");
                 marked = true;
             }
 
-            /* We will sweep this entry if !objectIsLive. */
-            if (keyObj != entry.key.object || keyId != entry.key.id)
-                e.rekeyFront(WatchKey(keyObj, keyId));
+            /* We will sweep this entry in sweepAll if !objectIsLive. */
+            if (priorKeyObj != entry.key.object || priorKeyId != entry.key.id)
+                e.rekeyFront(WatchKey(entry.key.object, entry.key.id));
         }
     }
     return marked;
 }
 
 void
 WatchpointMap::markAll(JSTracer *trc)
 {
     for (Map::Enum e(map); !e.empty(); e.popFront()) {
         Map::Entry &entry = e.front();
-        JSObject *keyObj = entry.key.object;
-        jsid keyId = entry.key.id;
-        JS_ASSERT(JSID_IS_STRING(keyId) || JSID_IS_INT(keyId));
+        JSObject *priorKeyObj = entry.key.object;
+        jsid priorKeyId = entry.key.id;
+        JS_ASSERT(JSID_IS_STRING(priorKeyId) || JSID_IS_INT(priorKeyId));
 
-        MarkObjectUnbarriered(trc, &keyObj, "held Watchpoint object");
-        MarkIdUnbarriered(trc, &keyId, "WatchKey::id");
+        MarkObject(trc, const_cast<EncapsulatedPtrObject *>(&entry.key.object),
+                   "held Watchpoint object");
+        MarkId(trc, const_cast<EncapsulatedId *>(&entry.key.id), "WatchKey::id");
         MarkObject(trc, &entry.value.closure, "Watchpoint::closure");
 
-        if (keyObj != entry.key.object || keyId != entry.key.id)
-            e.rekeyFront(WatchKey(keyObj, keyId));
+        if (priorKeyObj != entry.key.object || priorKeyId != entry.key.id)
+            e.rekeyFront(entry.key);
     }
 }
 
 void
 WatchpointMap::sweepAll(JSRuntime *rt)
 {
     for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
         if (WatchpointMap *wpmap = c->watchpointMap)
@@ -195,21 +214,21 @@ WatchpointMap::sweepAll(JSRuntime *rt)
     }
 }
 
 void
 WatchpointMap::sweep()
 {
     for (Map::Enum e(map); !e.empty(); e.popFront()) {
         Map::Entry &entry = e.front();
-        HeapPtrObject obj(entry.key.object);
+        RelocatablePtrObject obj(entry.key.object);
         if (!IsObjectMarked(&obj)) {
             JS_ASSERT(!entry.value.held);
             e.removeFront();
-        } else {
+        } else if (obj != entry.key.object) {
             e.rekeyFront(WatchKey(obj, entry.key.id));
         }
     }
 }
 
 void
 WatchpointMap::traceAll(WeakMapTracer *trc)
 {
--- a/js/src/jswatchpoint.h
+++ b/js/src/jswatchpoint.h
@@ -16,23 +16,23 @@
 #include "js/HashTable.h"
 
 namespace js {
 
 struct WatchKey {
     WatchKey() {}
     WatchKey(JSObject *obj, jsid id) : object(obj), id(id) {}
     WatchKey(const WatchKey &key) : object(key.object.get()), id(key.id.get()) {}
-    HeapPtrObject object;
-    HeapId id;
+    EncapsulatedPtrObject object;
+    EncapsulatedId id;
 };
 
 struct Watchpoint {
     JSWatchPointHandler handler;
-    HeapPtrObject closure;
+    RelocatablePtrObject closure;
     bool held;  /* true if currently running handler */
 };
 
 template <>
 struct DefaultHasher<WatchKey> {
     typedef WatchKey Lookup;
     static inline js::HashNumber hash(const Lookup &key);
 
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -86,17 +86,17 @@ WeakMapBase::restoreWeakMapList(JSRuntim
         JS_ASSERT(m->next == WeakMapNotInList);
         m->next = rt->gcWeakMapList;
         rt->gcWeakMapList = m;
     }
 }
 
 } /* namespace js */
 
-typedef WeakMap<HeapPtrObject, HeapValue> ObjectValueMap;
+typedef WeakMap<EncapsulatedPtrObject, RelocatableValue> ObjectValueMap;
 
 static ObjectValueMap *
 GetObjectMap(JSObject *obj)
 {
     JS_ASSERT(obj->isWeakMap());
     return (ObjectValueMap *)obj->getPrivate();
 }
 
@@ -257,16 +257,17 @@ WeakMap_set_impl(JSContext *cx, CallArgs
             return false;
         }
     }
 
     if (!map->put(key, value)) {
         JS_ReportOutOfMemory(cx);
         return false;
     }
+    HashTableWriteBarrierPost(cx->compartment, map, key);
 
     args.rval().setUndefined();
     return true;
 }
 
 JSBool
 WeakMap_set(JSContext *cx, unsigned argc, Value *vp)
 {
@@ -281,17 +282,17 @@ JS_NondeterministicGetWeakMapKeys(JSCont
         *ret = NULL;
         return true;
     }
     RootedObject arr(cx, NewDenseEmptyArray(cx));
     if (!arr)
         return false;
     ObjectValueMap *map = GetObjectMap(obj);
     if (map) {
-        for (ObjectValueMap::Range r = map->nondeterministicAll(); !r.empty(); r.popFront()) {
+        for (ObjectValueMap::Base::Range r = map->all(); !r.empty(); r.popFront()) {
             RootedObject key(cx, r.front().key);
             // Re-wrapping the key (see comment of GetKeyArg)
             if (!JS_WrapObject(cx, key.address()))
                 return false;
 
             if (!js_NewbornArrayPush(cx, arr, ObjectValue(*key)))
                 return false;
         }
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -142,32 +142,26 @@ class WeakMapBase {
     // has NULL as its next. Maps not in the list have WeakMapNotInList as their
     // next.  We must distinguish these cases to avoid creating infinite lists
     // when a weak map gets traced twice due to delayed marking.
     WeakMapBase *next;
 };
 
 template <class Key, class Value,
           class HashPolicy = DefaultHasher<Key> >
-class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, public WeakMapBase {
-  private:
+class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, public WeakMapBase
+{
+  public:
     typedef HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy> Base;
-
-  public:
     typedef typename Base::Enum Enum;
     typedef typename Base::Range Range;
 
     explicit WeakMap(JSRuntime *rt, JSObject *memOf=NULL) : Base(rt), WeakMapBase(memOf) { }
     explicit WeakMap(JSContext *cx, JSObject *memOf=NULL) : Base(cx), WeakMapBase(memOf) { }
 
-    /* Use with caution, as result can be affected by garbage collection. */
-    Range nondeterministicAll() {
-        return Base::all();
-    }
-
   private:
     bool markValue(JSTracer *trc, Value *x) {
         if (gc::IsMarked(x))
             return false;
         gc::Mark(trc, x, "WeakMap entry");
         return true;
     }
 
@@ -175,22 +169,22 @@ class WeakMap : public HashMap<Key, Valu
         for (Range r = Base::all(); !r.empty(); r.popFront())
             markValue(trc, &r.front().value);
     }
 
     bool markIteratively(JSTracer *trc) {
         bool markedAny = false;
         for (Enum e(*this); !e.empty(); e.popFront()) {
             /* If the entry is live, ensure its key and value are marked. */
-            Key k(e.front().key);
-            bool keyIsMarked = gc::IsMarked(&k);
-            if (keyIsMarked) {
+            Key prior(e.front().key);
+            if (gc::IsMarked(const_cast<Key *>(&e.front().key))) {
                 if (markValue(trc, &e.front().value))
                     markedAny = true;
-                e.rekeyFront(k);
+                if (prior != e.front().key)
+                    e.rekeyFront(e.front().key);
             }
         }
         return markedAny;
     }
 
     void sweep(JSTracer *trc) {
         /* Remove all entries whose keys remain unmarked. */
         for (Enum e(*this); !e.empty(); e.popFront()) {
--- a/js/src/jsxml.cpp
+++ b/js/src/jsxml.cpp
@@ -3031,17 +3031,17 @@ static JSXML *
 DeepCopy(JSContext *cx, JSXML *xml, JSObject *obj, unsigned flags)
 {
     JSXML *copy;
 
     copy = DeepCopyInLRS(cx, xml, flags);
     if (copy) {
         if (obj) {
             /* Caller provided the object for this copy, hook 'em up. */
-            obj->setPrivate(copy);
+            obj->setPrivateGCThing(copy);
             copy->object = obj;
         } else if (!js_GetXMLObject(cx, copy)) {
             copy = NULL;
         }
     }
     return copy;
 }
 
@@ -7303,17 +7303,17 @@ js_NewXMLObject(JSContext *cx, JSXMLClas
 static JSObject *
 NewXMLObject(JSContext *cx, JSXML *xml)
 {
     JSObject *obj;
 
     obj = NewObjectWithClassProto(cx, &XMLClass, NULL, cx->global());
     if (!obj)
         return NULL;
-    obj->setPrivate(xml);
+    obj->setPrivateGCThing(xml);
     return obj;
 }
 
 JSObject *
 js_GetXMLObject(JSContext *cx, JSXML *xmlArg)
 {
     Rooted<JSXML*> xml(cx, xmlArg);
     JSObject *obj;
@@ -7405,17 +7405,17 @@ js_InitXMLClass(JSContext *cx, JSObject 
     JS_ASSERT(obj->isNative());
 
     RootedObject xmlProto(cx, global->createBlankPrototype(cx, &XMLClass));
     if (!xmlProto)
         return NULL;
     Rooted<JSXML*> xml(cx, js_NewXML(cx, JSXML_CLASS_TEXT));
     if (!xml)
         return NULL;
-    xmlProto->setPrivate(xml);
+    xmlProto->setPrivateGCThing(xml);
     xml->object = xmlProto;
 
     /* Don't count this as a real content-created XML object. */
     if (!cx->runningWithTrustedPrincipals()) {
         JS_ASSERT(sE4XObjectsCreated > 0);
         --sE4XObjectsCreated;
     }
 
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -628,17 +628,17 @@ Debugger::wrapEnvironment(JSContext *cx,
     if (p) {
         envobj = p->value;
     } else {
         /* Create a new Debugger.Environment for env. */
         JSObject *proto = &object->getReservedSlot(JSSLOT_DEBUG_ENV_PROTO).toObject();
         envobj = NewObjectWithGivenProto(cx, &DebuggerEnv_class, proto, NULL);
         if (!envobj)
             return false;
-        envobj->setPrivate(env);
+        envobj->setPrivateGCThing(env);
         envobj->setReservedSlot(JSSLOT_DEBUGENV_OWNER, ObjectValue(*object));
         if (!environments.relookupOrAdd(p, env, envobj)) {
             js_ReportOutOfMemory(cx);
             return false;
         }
 
         CrossCompartmentKey key(CrossCompartmentKey::DebuggerEnvironment, object, env);
         if (!object->compartment()->crossCompartmentWrappers.put(key, ObjectValue(*envobj))) {
@@ -664,22 +664,23 @@ Debugger::wrapDebuggeeValue(JSContext *c
             vp->setObject(*p->value);
         } else {
             /* Create a new Debugger.Object for obj. */
             JSObject *proto = &object->getReservedSlot(JSSLOT_DEBUG_OBJECT_PROTO).toObject();
             JSObject *dobj =
                 NewObjectWithGivenProto(cx, &DebuggerObject_class, proto, NULL);
             if (!dobj)
                 return false;
-            dobj->setPrivate(obj);
+            dobj->setPrivateGCThing(obj);
             dobj->setReservedSlot(JSSLOT_DEBUGOBJECT_OWNER, ObjectValue(*object));
             if (!objects.relookupOrAdd(p, obj, dobj)) {
                 js_ReportOutOfMemory(cx);
                 return false;
             }
+            HashTableWriteBarrierPost(cx->compartment, &objects, obj);
 
             if (obj->compartment() != object->compartment()) {
                 CrossCompartmentKey key(CrossCompartmentKey::DebuggerObject, object, obj);
                 if (!object->compartment()->crossCompartmentWrappers.put(key, ObjectValue(*dobj))) {
                     objects.remove(obj);
                     js_ReportOutOfMemory(cx);
                     return false;
                 }
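HashTableWriteBarrierPost itself is defined outside the hunks shown here. A hedged sketch of the shape such a helper plausibly takes (putGeneric and HashKeyRef are assumed names, not confirmed by this excerpt): after a successful put, the map and key are registered so a minor GC can re-key any entry whose key object is moved out of the nursery. Note the ordering above: the barrier runs only after relookupOrAdd succeeds, so the store buffer never refers to an entry that was never inserted.

    #ifdef JSGC_GENERATIONAL
    // Sketch; the real helper's body is not shown in this excerpt.
    template <class Map, class Key>
    static void
    HashTableWriteBarrierPostSketch(JSCompartment *comp, Map *map, const Key &key)
    {
        // Record "map's entry keyed by key" for fixup during minor GC.
        comp->gcStoreBuffer.putGeneric(gc::HashKeyRef<Map, Key>(map, key));
    }
    #endif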
@@ -1251,39 +1252,35 @@ Debugger::onSingleStep(JSContext *cx, Va
 void
 Debugger::markKeysInCompartment(JSTracer *tracer)
 {
     /*
      * WeakMap::Range is deliberately private, to discourage C++ code from
      * enumerating WeakMap keys. However, in this case we need access, so we
      * make a base-class reference. Range is public in HashMap.
      */
-    typedef HashMap<HeapPtrObject, HeapPtrObject, DefaultHasher<HeapPtrObject>, RuntimeAllocPolicy>
-        ObjectMap;
-    const ObjectMap &objStorage = objects;
-    for (ObjectMap::Range r = objStorage.all(); !r.empty(); r.popFront()) {
-        const HeapPtrObject &key = r.front().key;
+    ObjectWeakMap::Base &objStorage = objects;
+    for (ObjectWeakMap::Base::Range r = objStorage.all(); !r.empty(); r.popFront()) {
+        const EncapsulatedPtrObject &key = r.front().key;
         HeapPtrObject tmp(key);
         gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
         JS_ASSERT(tmp == key);
     }
 
-    const ObjectMap &envStorage = environments;
-    for (ObjectMap::Range r = envStorage.all(); !r.empty(); r.popFront()) {
-        const HeapPtrObject &key = r.front().key;
+    ObjectWeakMap::Base &envStorage = environments;
+    for (ObjectWeakMap::Base::Range r = envStorage.all(); !r.empty(); r.popFront()) {
+        const EncapsulatedPtrObject &key = r.front().key;
         HeapPtrObject tmp(key);
         js::gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
         JS_ASSERT(tmp == key);
     }
 
-    typedef HashMap<HeapPtrScript, HeapPtrObject, DefaultHasher<HeapPtrScript>, RuntimeAllocPolicy>
-        ScriptMap;
-    const ScriptMap &scriptStorage = scripts;
-    for (ScriptMap::Range r = scriptStorage.all(); !r.empty(); r.popFront()) {
-        const HeapPtrScript &key = r.front().key;
+    const ScriptWeakMap::Base &scriptStorage = scripts;
+    for (ScriptWeakMap::Base::Range r = scriptStorage.all(); !r.empty(); r.popFront()) {
+        const EncapsulatedPtrScript &key = r.front().key;
         HeapPtrScript tmp(key);
         gc::MarkScript(tracer, &tmp, "cross-compartment WeakMap key");
         JS_ASSERT(tmp == key);
     }
 }
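The Base references above lean on a typedef the WeakMap template re-exports; roughly (an assumed shape, reconstructed from the usage here rather than quoted from jsweakmap.h):

    template <class Key, class Value>
    class WeakMap : public HashMap<Key, Value, DefaultHasher<Key>, RuntimeAllocPolicy>
    {
      public:
        // Naming the base re-exposes Range to trusted callers; WeakMap hides
        // it to discourage arbitrary enumeration of weak keys.
        typedef HashMap<Key, Value, DefaultHasher<Key>, RuntimeAllocPolicy> Base;
        /* ... */
    };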
 
 /*
  * Ordinarily, WeakMap keys and values are marked because at some point it was
@@ -1343,17 +1340,17 @@ Debugger::markAllIteratively(GCMarker *t
      */
     JSRuntime *rt = trc->runtime;
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         GlobalObjectSet &debuggees = c->getDebuggees();
         for (GlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
             GlobalObject *global = e.front();
             if (!IsObjectMarked(&global))
                 continue;
-            else
+            else if (global != e.front())
                 e.rekeyFront(global);
 
             /*
              * Every debuggee has at least one debugger, so in this case
              * getDebuggers can't return NULL.
              */
             const GlobalObject::DebuggerVector *debuggers = global->getDebuggers();
             JS_ASSERT(debuggers);
@@ -1419,17 +1416,17 @@ Debugger::trace(JSTracer *trc)
      * Mark Debugger.Frame objects. These are all reachable from JS, because the
      * corresponding StackFrames are still on the stack.
      *
      * (Once we support generator frames properly, we will need
      * weakly-referenced Debugger.Frame objects as well, for suspended generator
      * frames.)
      */
     for (FrameMap::Range r = frames.all(); !r.empty(); r.popFront()) {
-        HeapPtrObject &frameobj = r.front().value;
+        RelocatablePtrObject &frameobj = r.front().value;
         JS_ASSERT(frameobj->getPrivate());
         MarkObject(trc, &frameobj, "live Debugger.Frame");
     }
 
     /* Trace the weak map from JSScript instances to Debugger.Script objects. */
     scripts.trace(trc);
 
     /* Trace the referent -> Debugger.Object weak map. */
@@ -1460,17 +1457,17 @@ Debugger::sweepAll(FreeOp *fop)
 
     for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++) {
         /* For each debuggee being GC'd, detach it from all its debuggers. */
         GlobalObjectSet &debuggees = (*c)->getDebuggees();
         for (GlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
             GlobalObject *global = e.front();
             if (!IsObjectMarked(&global))
                 detachAllDebuggersFromGlobal(fop, global, &e);
-            else
+            else if (global != e.front())
                 e.rekeyFront(global);
         }
     }
 }
 
 void
 Debugger::detachAllDebuggersFromGlobal(FreeOp *fop, GlobalObject *global,
                                        GlobalObjectSet::Enum *compartmentEnum)
@@ -2369,17 +2366,17 @@ GetScriptReferent(JSObject *obj)
     JS_ASSERT(obj->getClass() == &DebuggerScript_class);
     return static_cast<JSScript *>(obj->getPrivate());
 }
 
 static inline void
 SetScriptReferent(JSObject *obj, JSScript *script)
 {
     JS_ASSERT(obj->getClass() == &DebuggerScript_class);
-    obj->setPrivate(script);
+    obj->setPrivateGCThing(script);
 }
 
 static void
 DebuggerScript_trace(JSTracer *trc, JSObject *obj)
 {
     /* This comes from a private pointer, so no barrier needed. */
     if (JSScript *script = GetScriptReferent(obj)) {
         MarkCrossCompartmentScriptUnbarriered(trc, &script, "Debugger.Script referent");
@@ -2406,17 +2403,17 @@ Debugger::newDebuggerScript(JSContext *c
     assertSameCompartment(cx, object.get());
 
     JSObject *proto = &object->getReservedSlot(JSSLOT_DEBUG_SCRIPT_PROTO).toObject();
     JS_ASSERT(proto);
     JSObject *scriptobj = NewObjectWithGivenProto(cx, &DebuggerScript_class, proto, NULL);
     if (!scriptobj)
         return NULL;
     scriptobj->setReservedSlot(JSSLOT_DEBUGSCRIPT_OWNER, ObjectValue(*object));
-    scriptobj->setPrivate(script);
+    scriptobj->setPrivateGCThing(script);
 
     return scriptobj;
 }
 
 JSObject *
 Debugger::wrapScript(JSContext *cx, HandleScript script)
 {
     assertSameCompartment(cx, object.get());
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -65,26 +65,28 @@ class Debugger {
      * soon as they leave the stack (see slowPathOnLeaveFrame) and in
      * removeDebuggee.
      *
      * We don't trace the keys of this map (the frames are on the stack and
      * thus necessarily live), but we do trace the values. It's like a WeakMap
      * that way, but since stack frames are not gc-things, the implementation
      * has to be different.
      */
-    typedef HashMap<StackFrame *, HeapPtrObject, DefaultHasher<StackFrame *>, RuntimeAllocPolicy>
-        FrameMap;
+    typedef HashMap<StackFrame *,
+                    RelocatablePtrObject,
+                    DefaultHasher<StackFrame *>,
+                    RuntimeAllocPolicy> FrameMap;
     FrameMap frames;
 
     /* An ephemeral map from JSScript* to Debugger.Script instances. */
-    typedef WeakMap<HeapPtrScript, HeapPtrObject> ScriptWeakMap;
+    typedef WeakMap<EncapsulatedPtrScript, RelocatablePtrObject> ScriptWeakMap;
     ScriptWeakMap scripts;
 
     /* The map from debuggee objects to their Debugger.Object instances. */
-    typedef WeakMap<HeapPtrObject, HeapPtrObject> ObjectWeakMap;
+    typedef WeakMap<EncapsulatedPtrObject, RelocatablePtrObject> ObjectWeakMap;
     ObjectWeakMap objects;
 
     /* The map from debuggee Envs to Debugger.Environment instances. */
     ObjectWeakMap environments;
 
     class FrameRange;
     class ScriptQuery;
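These typedef swaps are the heart of the patch's map changes, so the wrapper taxonomy deserves a sentence each: EncapsulatedPtr runs only the incremental pre barrier (the weak-map keys get their post barrier via HashTableWriteBarrierPost instead); HeapPtr adds a generational post barrier for slots that stay at a fixed address; RelocatablePtr additionally registers and later withdraws its own address, which HashMap values need because rehashing moves entries. The withdrawal half of RelocatablePtr's bookkeeping plausibly looks like this (a sketch; the exact body lies outside this excerpt):

    template <typename T>
    inline void
    RelocatablePtr<T>::relocate(JSCompartment *comp)
    {
    #ifdef JSGC_GENERATIONAL
        // Withdraw this slot's address before it is freed or moved, so the
        // store buffer never scans a stale location.
        comp->gcStoreBuffer.removeRelocatableCell((gc::Cell **)&this->value);
    #endif
    }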
 
--- a/js/src/vm/ObjectImpl-inl.h
+++ b/js/src/vm/ObjectImpl-inl.h
@@ -198,32 +198,61 @@ js::ObjectImpl::nativeGetSlotRef(uint32_
 inline const js::Value &
 js::ObjectImpl::nativeGetSlot(uint32_t slot) const
 {
     MOZ_ASSERT(isNative());
     MOZ_ASSERT(slot < slotSpan());
     return getSlot(slot);
 }
 
+static JS_ALWAYS_INLINE JSCompartment *
+ValueCompartment(const js::Value &value)
+{
+    JS_ASSERT(value.isMarkable());
+    return static_cast<js::gc::Cell *>(value.toGCThing())->compartment();
+}
+
+static bool
+IsValueInCompartment(js::Value v, JSCompartment *comp)
+{
+    if (!v.isMarkable())
+        return true;
+    JSCompartment *vcomp = ValueCompartment(v);
+    return vcomp == comp->rt->atomsCompartment || vcomp == comp;
+}
+
 inline void
 js::ObjectImpl::setSlot(uint32_t slot, const js::Value &value)
 {
     MOZ_ASSERT(slotInRange(slot));
+    MOZ_ASSERT(IsValueInCompartment(value, compartment()));
     getSlotRef(slot).set(this->asObjectPtr(), slot, value);
 }
 
 inline void
 js::ObjectImpl::initSlot(uint32_t slot, const js::Value &value)
 {
     MOZ_ASSERT(getSlot(slot).isUndefined() || getSlot(slot).isMagic(JS_ARRAY_HOLE));
     MOZ_ASSERT(slotInRange(slot));
+    MOZ_ASSERT(IsValueInCompartment(value, compartment()));
     initSlotUnchecked(slot, value);
 }
 
 inline void
+js::ObjectImpl::initCrossCompartmentSlot(uint32_t slot, const js::Value &value)
+{
+    MOZ_ASSERT(getSlot(slot).isUndefined() || getSlot(slot).isMagic(JS_ARRAY_HOLE));
+    MOZ_ASSERT(slotInRange(slot));
+    if (value.isMarkable())
+        getSlotRef(slot).init(ValueCompartment(value), this->asObjectPtr(), slot, value);
+    else
+        initSlot(slot, value);
+}
+
+inline void
 js::ObjectImpl::initSlotUnchecked(uint32_t slot, const js::Value &value)
 {
     getSlotAddressUnchecked(slot)->init(this->asObjectPtr(), slot, value);
 }
 
 inline void
 js::ObjectImpl::setFixedSlot(uint32_t slot, const js::Value &value)
 {
@@ -330,18 +359,21 @@ js::ObjectImpl::privateWriteBarrierPre(v
     if (comp->needsBarrier()) {
         if (*old && getClass()->trace)
             getClass()->trace(comp->barrierTracer(), this->asObjectPtr());
     }
 #endif
 }
 
 inline void
-js::ObjectImpl::privateWriteBarrierPost(void **old)
+js::ObjectImpl::privateWriteBarrierPost(void **pprivate)
 {
+#ifdef JSGC_GENERATIONAL
+    compartment()->gcStoreBuffer.putCell(reinterpret_cast<js::gc::Cell **>(pprivate));
+#endif
 }
 
 /* static */ inline void
 js::ObjectImpl::writeBarrierPre(ObjectImpl *obj)
 {
 #ifdef JSGC_INCREMENTAL
     /*
      * This would normally be a null test, but TypeScript::global uses 0x1 as a
@@ -358,16 +390,21 @@ js::ObjectImpl::writeBarrierPre(ObjectIm
         MOZ_ASSERT(tmp == obj->asObjectPtr());
     }
 #endif
 }
 
 /* static */ inline void
 js::ObjectImpl::writeBarrierPost(ObjectImpl *obj, void *addr)
 {
+#ifdef JSGC_GENERATIONAL
+    if (uintptr_t(obj) < 32)
+        return;
+    obj->compartment()->gcStoreBuffer.putCell((Cell **)addr);
+#endif
 }
 
 inline bool
 js::ObjectImpl::hasPrivate() const
 {
     return getClass()->hasPrivate();
 }
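writeBarrierPost above records the address of the written field, not the object, so a minor GC can update the field if the target moves; values below 32 are skipped because small tagged integers serve as sentinels rather than real cells (the pre-barrier comment above mentions TypeScript::global using 0x1). A hedged caller sketch:

    // Illustrative caller; SetObjectField is not part of this patch.
    static void
    SetObjectField(js::ObjectImpl *holder, JSObject **field, JSObject *value)
    {
        *field = value;
        // Pass the new target and the field's address; this is a no-op for
        // sentinel values and for non-generational builds.
        js::ObjectImpl::writeBarrierPost(value, field);
    }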
 
@@ -396,19 +433,26 @@ js::ObjectImpl::getPrivate(uint32_t nfix
 {
     return privateRef(nfixed);
 }
 
 inline void
 js::ObjectImpl::setPrivate(void *data)
 {
     void **pprivate = &privateRef(numFixedSlots());
-
     privateWriteBarrierPre(pprivate);
     *pprivate = data;
+}
+
+inline void
+js::ObjectImpl::setPrivateGCThing(js::gc::Cell *cell)
+{
+    void **pprivate = &privateRef(numFixedSlots());
+    privateWriteBarrierPre(pprivate);
+    *pprivate = reinterpret_cast<void *>(cell);
     privateWriteBarrierPost(pprivate);
 }
 
 inline void
 js::ObjectImpl::setPrivateUnbarriered(void *data)
 {
     void **pprivate = &privateRef(numFixedSlots());
     *pprivate = data;
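One subtlety in the new initCrossCompartmentSlot: store buffers are per-compartment here, so a cross-compartment edge (a Debugger wrapper slot, say) must register with the compartment that owns the target value, which is why ValueCompartment(value) rather than the holder's compartment is passed to init. The same-compartment paths instead assert IsValueInCompartment. Roughly:

    // Illustration; CrossCompartmentInit is not a function in this patch.
    static void
    CrossCompartmentInit(js::ObjectImpl *holder, uint32_t slot, const js::Value &v)
    {
        // initSlot would assert IsValueInCompartment(v, holder's compartment);
        // for debugger-style edges the value may live elsewhere, so barrier
        // bookkeeping is routed to ValueCompartment(v) instead.
        holder->initCrossCompartmentSlot(slot, v);
    }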
--- a/js/src/vm/ObjectImpl.cpp
+++ b/js/src/vm/ObjectImpl.cpp
@@ -450,17 +450,17 @@ DenseElementsHeader::defineElement(JSCon
 
 JSObject *
 js::ArrayBufferDelegate(JSContext *cx, Handle<ObjectImpl*> obj)
 {
     MOZ_ASSERT(obj->hasClass(&ArrayBufferClass));
     if (obj->getPrivate())
         return static_cast<JSObject *>(obj->getPrivate());
     JSObject *delegate = NewObjectWithGivenProto(cx, &ObjectClass, obj->getProto(), NULL);
-    obj->setPrivate(delegate);
+    obj->setPrivateGCThing(delegate);
     return delegate;
 }
 
 template <typename T>
 bool
 TypedElementsHeader<T>::defineElement(JSContext *cx, Handle<ObjectImpl*> obj,
                                       uint32_t index, const PropDesc &desc, bool shouldThrow,
                                       unsigned resolveFlags, bool *succeeded)
--- a/js/src/vm/ObjectImpl.h
+++ b/js/src/vm/ObjectImpl.h
@@ -1200,16 +1200,17 @@ class ObjectImpl : public gc::Cell
         return *getSlotAddress(slot);
     }
 
     inline HeapSlot &nativeGetSlotRef(uint32_t slot);
     inline const Value &nativeGetSlot(uint32_t slot) const;
 
     inline void setSlot(uint32_t slot, const Value &value);
     inline void initSlot(uint32_t slot, const Value &value);
+    inline void initCrossCompartmentSlot(uint32_t slot, const js::Value &value);
     inline void initSlotUnchecked(uint32_t slot, const Value &value);
 
     /* For slots which are known to always be fixed, due to the way they are allocated. */
 
     HeapSlot &getFixedSlotRef(uint32_t slot) {
         MOZ_ASSERT(slot < numFixedSlots());
         return fixedSlots()[slot];
     }
@@ -1265,26 +1266,27 @@ class ObjectImpl : public gc::Cell
     }
 
     /* GC support. */
     static inline ThingRootKind rootKind() { return THING_ROOT_OBJECT; }
     static inline void readBarrier(ObjectImpl *obj);
     static inline void writeBarrierPre(ObjectImpl *obj);
     static inline void writeBarrierPost(ObjectImpl *obj, void *addr);
     inline void privateWriteBarrierPre(void **oldval);
-    inline void privateWriteBarrierPost(void **oldval);
+    inline void privateWriteBarrierPost(void **pprivate);
     void markChildren(JSTracer *trc);
 
     /* Private data accessors. */
 
     inline void *&privateRef(uint32_t nfixed) const; /* XXX should be private, not protected! */
 
     inline bool hasPrivate() const;
     inline void *getPrivate() const;
     inline void setPrivate(void *data);
+    inline void setPrivateGCThing(gc::Cell *cell);
     inline void setPrivateUnbarriered(void *data);
     inline void initPrivate(void *data);
 
     /* Access private data for an object with a known number of fixed slots. */
     inline void *getPrivate(uint32_t nfixed) const;
 
     /* JIT Accessors */
     static size_t offsetOfShape() { return offsetof(ObjectImpl, shape_); }
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -1591,16 +1591,17 @@ DebugScopes::addDebugScope(JSContext *cx
     if (!CanUseDebugScopeMaps(cx))
         return true;
 
     JS_ASSERT(!proxiedScopes.has(&scope));
     if (!proxiedScopes.put(&scope, &debugScope)) {
         js_ReportOutOfMemory(cx);
         return false;
     }
+    HashTableWriteBarrierPost(debugScope.compartment(), &proxiedScopes, &scope);
     return true;
 }
 
 DebugScopeObject *
 DebugScopes::hasDebugScope(JSContext *cx, const ScopeIter &si) const
 {
     JS_ASSERT(!si.hasScopeObject());
     if (MissingScopeMap::Ptr p = missingScopes.lookup(si)) {
--- a/js/src/vm/ScopeObject.h
+++ b/js/src/vm/ScopeObject.h
@@ -519,17 +519,17 @@ class DebugScopeObject : public JSObject
 };
 
 /* Maintains runtime-wide debug scope bookkeeping information. */
 class DebugScopes
 {
     JSRuntime *rt;
 
     /* The map from (non-debug) scopes to debug scopes. */
-    typedef WeakMap<HeapPtrObject, HeapPtrObject> ObjectWeakMap;
+    typedef WeakMap<EncapsulatedPtrObject, RelocatablePtrObject> ObjectWeakMap;
     ObjectWeakMap proxiedScopes;
 
     /*
      * The map from live frames which have optimized-away scopes to the
      * corresponding debug scopes.
      */
     typedef HashMap<ScopeIterKey,
                     ReadBarriered<DebugScopeObject>,
--- a/js/src/vm/String-inl.h
+++ b/js/src/vm/String-inl.h
@@ -12,16 +12,17 @@
 #include "jsprobes.h"
 
 #include "gc/Marking.h"
 #include "String.h"
 
 #include "jsgcinlines.h"
 #include "jsobjinlines.h"
 #include "gc/Barrier-inl.h"
+#include "gc/StoreBuffer.h"
 
 namespace js {
 
 static JS_ALWAYS_INLINE JSFixedString *
 NewShortString(JSContext *cx, const jschar *chars, size_t length)
 {
     SkipRoot skip(cx, &chars);
 
@@ -38,16 +39,32 @@ NewShortString(JSContext *cx, const jsch
 
     jschar *storage = str->init(length);
     PodCopy(storage, chars, length);
     storage[length] = 0;
     Probes::createString(cx, str, length);
     return str;
 }
 
+static inline void
+StringWriteBarrierPost(JSCompartment *comp, JSString **strp)
+{
+#ifdef JSGC_GENERATIONAL
+    comp->gcStoreBuffer.putRelocatableCell(reinterpret_cast<gc::Cell **>(strp));
+#endif
+}
+
+static inline void
+StringWriteBarrierPostRemove(JSCompartment *comp, JSString **strp)
+{
+#ifdef JSGC_GENERATIONAL
+    comp->gcStoreBuffer.removeRelocatableCell(reinterpret_cast<gc::Cell **>(strp));
+#endif
+}
+
 } /* namespace js */
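The two helpers above come as a pair by design: every location registered with putRelocatableCell must be withdrawn with removeRelocatableCell before the field dies or is reused for non-pointer data, or a minor GC would scan a stale slot. A hedged usage sketch:

    // Illustrative field management; these are not functions in this patch.
    static void
    SetLeftChild(JSCompartment *comp, JSString **field, JSString *left)
    {
        *field = left;
        js::StringWriteBarrierPost(comp, field);        // register the slot
    }

    static void
    ClearLeftChild(JSCompartment *comp, JSString **field)
    {
        js::StringWriteBarrierPostRemove(comp, field);  // withdraw before reuse
        *field = NULL;
    }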
 
 inline void
 JSString::writeBarrierPre(JSString *str)
 {
 #ifdef JSGC_INCREMENTAL
     if (!str)
         return;
@@ -59,16 +76,21 @@ JSString::writeBarrierPre(JSString *str)
         JS_ASSERT(tmp == str);
     }
 #endif
 }
 
 inline void
 JSString::writeBarrierPost(JSString *str, void *addr)
 {
+#ifdef JSGC_GENERATIONAL
+    if (!str)
+        return;
+    str->compartment()->gcStoreBuffer.putCell((Cell **)addr);
+#endif
 }
 
 inline bool
 JSString::needWriteBarrierPre(JSCompartment *comp)
 {
 #ifdef JSGC_INCREMENTAL
     return comp->needsBarrier();
 #else
@@ -101,18 +123,18 @@ JSString::validateLength(JSContext *cx, 
 }
 
 JS_ALWAYS_INLINE void
 JSRope::init(JSString *left, JSString *right, size_t length)
 {
     d.lengthAndFlags = buildLengthAndFlags(length, ROPE_FLAGS);
     d.u1.left = left;
     d.s.u2.right = right;
-    JSString::writeBarrierPost(d.u1.left, &d.u1.left);
-    JSString::writeBarrierPost(d.s.u2.right, &d.s.u2.right);
+    js::StringWriteBarrierPost(compartment(), &d.u1.left);
+    js::StringWriteBarrierPost(compartment(), &d.s.u2.right);
 }
 
 JS_ALWAYS_INLINE JSRope *
 JSRope::new_(JSContext *cx, js::HandleString left, js::HandleString right, size_t length)
 {
     if (!validateLength(cx, length))
         return NULL;
     JSRope *str = (JSRope *)js_NewGCString(cx);
@@ -131,17 +153,17 @@ JSRope::markChildren(JSTracer *trc)
 
 JS_ALWAYS_INLINE void
 JSDependentString::init(JSLinearString *base, const jschar *chars, size_t length)
 {
     JS_ASSERT(!js::IsPoisonedPtr(base));
     d.lengthAndFlags = buildLengthAndFlags(length, DEPENDENT_FLAGS);
     d.u1.chars = chars;
     d.s.u2.base = base;
-    JSString::writeBarrierPost(d.s.u2.base, &d.s.u2.base);
+    js::StringWriteBarrierPost(compartment(), reinterpret_cast<JSString **>(&d.s.u2.base));
 }
 
 JS_ALWAYS_INLINE JSLinearString *
 JSDependentString::new_(JSContext *cx, JSLinearString *base_, const jschar *chars, size_t length)
 {
     JS::Rooted<JSLinearString*> base(cx, base_);
 
     /* Try to avoid long chains of dependent strings. */
--- a/js/src/vm/String.cpp
+++ b/js/src/vm/String.cpp
@@ -180,16 +180,17 @@ JSRope::flattenInternal(JSContext *maybe
      *
      * N.B. This optimization can create chains of dependent strings.
      */
     const size_t wholeLength = length();
     size_t wholeCapacity;
     jschar *wholeChars;
     JSString *str = this;
     jschar *pos;
+    JSCompartment *comp = compartment();
 
     if (this->leftChild()->isExtensible()) {
         JSExtensibleString &left = this->leftChild()->asExtensible();
         size_t capacity = left.capacity();
         if (capacity >= wholeLength) {
             if (b == WithIncrementalBarrier) {
                 JSString::writeBarrierPre(d.u1.left);
                 JSString::writeBarrierPre(d.s.u2.right);
@@ -197,33 +198,35 @@ JSRope::flattenInternal(JSContext *maybe
 
             wholeCapacity = capacity;
             wholeChars = const_cast<jschar *>(left.chars());
             size_t bits = left.d.lengthAndFlags;
             pos = wholeChars + (bits >> LENGTH_SHIFT);
             JS_STATIC_ASSERT(!(EXTENSIBLE_FLAGS & DEPENDENT_FLAGS));
             left.d.lengthAndFlags = bits ^ (EXTENSIBLE_FLAGS | DEPENDENT_FLAGS);
             left.d.s.u2.base = (JSLinearString *)this;  /* will be true on exit */
-            JSString::writeBarrierPost(left.d.s.u2.base, &left.d.s.u2.base);
+            StringWriteBarrierPostRemove(comp, &left.d.u1.left);
+            StringWriteBarrierPost(comp, (JSString **)&left.d.s.u2.base);
             goto visit_right_child;
         }
     }
 
     if (!AllocChars(maybecx, wholeLength, &wholeChars, &wholeCapacity))
         return NULL;
 
     pos = wholeChars;
     first_visit_node: {
         if (b == WithIncrementalBarrier) {
             JSString::writeBarrierPre(str->d.u1.left);
             JSString::writeBarrierPre(str->d.s.u2.right);
         }
 
         JSString &left = *str->d.u1.left;
         str->d.u1.chars = pos;
+        StringWriteBarrierPostRemove(comp, &str->d.u1.left);
         if (left.isRope()) {
             left.d.s.u3.parent = str;          /* Return to this when 'left' done, */
             left.d.lengthAndFlags = 0x200;     /* but goto visit_right_child. */
             str = &left;
             goto first_visit_node;
         }
         size_t len = left.length();
         PodCopy(pos, left.d.u1.chars, len);
@@ -243,22 +246,24 @@ JSRope::flattenInternal(JSContext *maybe
     }
     finish_node: {
         if (str == this) {
             JS_ASSERT(pos == wholeChars + wholeLength);
             *pos = '\0';
             str->d.lengthAndFlags = buildLengthAndFlags(wholeLength, EXTENSIBLE_FLAGS);
             str->d.u1.chars = wholeChars;
             str->d.s.u2.capacity = wholeCapacity;
+            StringWriteBarrierPostRemove(comp, &str->d.u1.left);
+            StringWriteBarrierPostRemove(comp, &str->d.s.u2.right);
             return &this->asFlat();
         }
         size_t progress = str->d.lengthAndFlags;
         str->d.lengthAndFlags = buildLengthAndFlags(pos - str->d.u1.chars, DEPENDENT_FLAGS);
         str->d.s.u2.base = (JSLinearString *)this;       /* will be true on exit */
-        JSString::writeBarrierPost(str->d.s.u2.base, &str->d.s.u2.base);
+        StringWriteBarrierPost(comp, (JSString **)&str->d.s.u2.base);
         str = str->d.s.u3.parent;
         if (progress == 0x200)
             goto visit_right_child;
         JS_ASSERT(progress == 0x300);
         goto finish_node;
     }
 }
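The Remove calls threaded through flattenInternal exist because a rope's child fields share storage with the flat string's character fields; once the union word is reused, any store-buffer entry still pointing at it would make a minor GC read character data as a Cell pointer. Schematically (an assumed simplification of JSString's actual layout):

    // Assumed simplification of the union reused during flattening.
    union RopeOrFlatWord {
        JSString     *left;   // while a rope: registered as a relocatable cell
        const jschar *chars;  // after flattening: plain character storage
    };
    // Hence StringWriteBarrierPostRemove(comp, &d.u1.left) must run before
    // d.u1.chars overwrites the same word.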